diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..46d619f78f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,30 @@ +root = true + +[*.java] +indent_style = tab +indent_size = 4 + +[*.adoc] +indent_style = tab +indent_size = 4 + +[*.groovy] +indent_style = tab +indent_size = 4 + +[*.xml] +indent_style = tab +indent_size = 4 + +[*.yml] +indent_style = space +indent_size = 2 + +[*.yaml] +indent_style = space +indent_size = 2 + +[*.sh] +indent_style = space +indent_size = 4 +end_of_line = lf diff --git a/.github/actions/build-images/action.yml b/.github/actions/build-images/action.yml index f374662e14..378a9a9c48 100644 --- a/.github/actions/build-images/action.yml +++ b/.github/actions/build-images/action.yml @@ -10,72 +10,34 @@ inputs: dockerhub-password: description: 'dockerhub password' required: true + GCR_JSON_KEY: + description: 'GCR_JSON_KEY' + required: true runs: using: "composite" steps: - name: Install pack uses: jvalkeal/build-zoo-handler@v0.0.4 with: - pack-version: 0.18.0 - - name: Build Images - env: - TAG: ${{ inputs.version }} - shell: bash - run: | - for v in 8 11 17 - do - pack build \ - --path spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=$v springcloud/spring-cloud-dataflow-server:$TAG-jdk$v - pack build \ - --path spring-cloud-dataflow-composed-task-runner/target/spring-cloud-dataflow-composed-task-runner-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=$v springcloud/spring-cloud-dataflow-composed-task-runner:$TAG-jdk$v - pack build \ - --path spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-sink-kafka/target/spring-cloud-dataflow-tasklauncher-sink-kafka-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=$v springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka:$TAG-jdk$v - pack build \ - --path spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-sink-rabbit/target/spring-cloud-dataflow-tasklauncher-sink-rabbit-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=$v springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit:$TAG-jdk$v - pack build \ - --path spring-cloud-dataflow-single-step-batch-job/target/spring-cloud-dataflow-single-step-batch-job-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=$v springcloud/spring-cloud-dataflow-single-step-batch-job:$TAG-jdk$v - done + pack-version: 0.30.0 - # docker hub login - - uses: docker/login-action@v1 + # docker hub login + - name: Login to docker.io + uses: docker/login-action@v3 with: username: ${{ inputs.dockerhub-username }} password: ${{ inputs.dockerhub-password }} - - # push images - - name: Push images + - name: Login to GCR + uses: docker/login-action@v3 + with: + registry: gcr.io + username: _json_key + password: ${{ inputs.GCR_JSON_KEY }} + - name: Build Images env: TAG: ${{ inputs.version }} shell: bash - run: | - docker tag springcloud/spring-cloud-dataflow-server:$TAG-jdk11 springcloud/spring-cloud-dataflow-server:$TAG - docker tag springcloud/spring-cloud-dataflow-composed-task-runner:$TAG-jdk11 springcloud/spring-cloud-dataflow-composed-task-runner:$TAG - docker tag springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka:$TAG-jdk11 springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka:$TAG - docker tag springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit:$TAG-jdk11 
springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit:$TAG - docker tag springcloud/spring-cloud-dataflow-single-step-batch-job:$TAG-jdk11 springcloud/spring-cloud-dataflow-single-step-batch-job:$TAG - for v in 8 11 17 - do - docker push springcloud/spring-cloud-dataflow-server:$TAG-jdk$v - docker push springcloud/spring-cloud-dataflow-composed-task-runner:$TAG-jdk$v - docker push springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka:$TAG-jdk$v - docker push springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit:$TAG-jdk$v - docker push springcloud/spring-cloud-dataflow-single-step-batch-job:$TAG-jdk$v - done - docker push springcloud/spring-cloud-dataflow-server:$TAG - docker push springcloud/spring-cloud-dataflow-composed-task-runner:$TAG - docker push springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka:$TAG - docker push springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit:$TAG - docker push springcloud/spring-cloud-dataflow-single-step-batch-job:$TAG + run: ${{ github.action_path }}/build-images.sh # build/publish support images - name: Publish Grafana Prometheus diff --git a/.github/actions/build-images/build-images.sh b/.github/actions/build-images/build-images.sh new file mode 100755 index 0000000000..f59bfd68d2 --- /dev/null +++ b/.github/actions/build-images/build-images.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +if [ "$TAG" == "" ]; then + echo "TAG not found" + exit 1 +fi +if [ "$DEFAULT_JDK" = "" ]; then + echo "DEFAULT_JDK not found using 11" + DEFAULT_JDK=11 +else + echo "DEFAULT_JDK=$DEFAULT_JDK" +fi + +function pack_image { + JAR="$1-$TAG.jar" + REPO="$2" + v="$3" + if [ ! -f "$JAR" ]; then + echo "File not found $JAR" + exit 2 + fi + echo "Creating: $REPO:$TAG-jdk$v" + # --buildpack "paketo-buildpacks/java@10.0.0" --buildpack "paketo-buildpacks/bellsoft-liberica@10.3.2" + pack build --builder gcr.io/paketo-buildpacks/builder:base \ + --path "$JAR" \ + --trust-builder --verbose \ + --env BP_JVM_VERSION=$v "$REPO:$TAG-jdk$v" + RC=$? + if ((RC!=0)); then + echo "Error $RC packaging $JAR" + exit $RC + fi + echo "Created: $REPO:$TAG-jdk$v" +} +LEN=$(jq '.include | length' .github/workflows/images.json) +for ((i = 0; i < LEN; i++)); do + TARGET="$(jq -r --argjson index $i '.include[$index] | .path' .github/workflows/images.json)" + IMAGE="$(jq -r --argjson index $i '.include[$index] | .image' .github/workflows/images.json)" + ARTIFACT_ID="$(jq -r --argjson index $i '.include[$index] | .name' .github/workflows/images.json)" + # 8 11 17 21 + for v in 8 11 17; do + pack_image "$TARGET/$ARTIFACT_ID" $IMAGE $v $ARTIFACT_ID + RC=$? 
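+    # any failure stops the loop; otherwise push the per-JDK tag, and re-tag/push the DEFAULT_JDK build as plain $TAG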
+ if [ $RC -ne 0 ]; then + exit $RC + fi + docker push "$IMAGE:$TAG-jdk$v" + echo "Pushed $IMAGE:$TAG-jdk$v" + if [ "$DEFAULT_JDK" == "$v" ]; then + docker tag "$IMAGE:$TAG-jdk$DEFAULT_JDK" "$IMAGE:$TAG" + docker push "$IMAGE:$TAG" + echo "Pushed $IMAGE:$TAG" + fi + done +done +echo "Pruning Docker" +docker system prune -f +docker system prune --volumes -f diff --git a/.github/actions/build-package-bundle/action.yml b/.github/actions/build-package-bundle/action.yml index f8f542d2d6..33dcac2e01 100644 --- a/.github/actions/build-package-bundle/action.yml +++ b/.github/actions/build-package-bundle/action.yml @@ -7,6 +7,21 @@ inputs: skipper-version: description: skipper version required: true + server-version: + description: server version + required: true + server-repository: + description: server repository + required: true + skipper-repository: + description: skipper repository + required: true + ctr-version: + description: ctr version + required: true + package-name: + description: package name + required: true package-bundle-template: description: path to package bundle template required: true @@ -16,6 +31,15 @@ inputs: config: description: path to ytt config files dir required: true + project-directory: + description: The working directory + required: true + SRP_CLIENT_ID: + description: secrets.SRP_CLIENT_ID + required: false + SRP_CLIENT_SECRET: + description: secrets.SRP_CLIENT_SECRET + required: false outputs: bundle-path: description: Location path where bundle was build @@ -24,43 +48,37 @@ outputs: runs: using: "composite" steps: - - uses: vmware-tanzu/carvel-setup-action@v1 - name: Tanzu Dance id: tanzu-dance env: DATAFLOW_VERSION: ${{ inputs.dataflow-version }} SKIPPER_VERSION: ${{ inputs.skipper-version }} + SKIPPER_REPOSITORY: ${{ inputs.skipper-repository }} + SERVER_VERSION: ${{ inputs.server-version }} + SERVER_REPOSITORY: ${{ inputs.server-repository }} + CTR_VERSION: ${{ inputs.ctr-version }} + PACKAGE_NAME: ${{ inputs.package-name }} PACKAGE_BUNDLE_TEMPLATE: ${{ inputs.package-bundle-template }} PACKAGE_BUNDLE_GENERATED: ${{ runner.temp }}/generated/packagebundle VENDIR_SRC_IN: ${{ inputs.config }} IMGPKG_LOCK_TEMPLATE: ${{ inputs.imgpkg-lock-template }} IMGPKG_LOCK_GENERATED_IN: ${{ runner.temp }}/generated/imgpkgin IMGPKG_LOCK_GENERATED_OUT: ${{ runner.temp }}/generated/imgpkgout + SRP_CLIENT_ID: ${{ inputs.SRP_CLIENT_ID }} + SRP_CLIENT_SECRET: ${{ inputs.SRP_CLIENT_SECRET }} shell: bash + working-directory: ${{ inputs.project-directory }} run: | - echo "::set-output name=bundle-path::$PACKAGE_BUNDLE_GENERATED" - ytt \ - -f $PACKAGE_BUNDLE_TEMPLATE \ - --output-files $PACKAGE_BUNDLE_GENERATED \ - --data-value-yaml project.version=$DATAFLOW_VERSION \ - --data-value-yaml spring.cloud.skipper.version=$SKIPPER_VERSION \ - --file-mark 'config/values.yml:type=text-template' - ytt \ - -f $IMGPKG_LOCK_TEMPLATE \ - --output-files $IMGPKG_LOCK_GENERATED_IN \ - --data-value-yaml project.version=$DATAFLOW_VERSION \ - --data-value-yaml spring.cloud.skipper.version=$SKIPPER_VERSION \ - --file-mark '**/*.yml:type=text-template' - cp -R $VENDIR_SRC_IN $PACKAGE_BUNDLE_GENERATED/config/upstream - vendir sync --chdir $PACKAGE_BUNDLE_GENERATED - mkdir -p $IMGPKG_LOCK_GENERATED_OUT - - for DIR in $(ls $IMGPKG_LOCK_GENERATED_IN) - do - ytt -f $PACKAGE_BUNDLE_GENERATED -f $IMGPKG_LOCK_GENERATED_IN/$DIR > $IMGPKG_LOCK_GENERATED_OUT/$DIR.yml - done - - mkdir -p $PACKAGE_BUNDLE_GENERATED/.imgpkg - kbld \ - -f $IMGPKG_LOCK_GENERATED_OUT \ - --imgpkg-lock-output 
$PACKAGE_BUNDLE_GENERATED/.imgpkg/images.yml + echo "bundle-path=$PACKAGE_BUNDLE_GENERATED" >> $GITHUB_OUTPUT + SCDF_DIR="${{ inputs.project-directory || '.' }}" + if [ "$USE_SRP" == "true" ]; then + if [ "$SRP_SCRIPTS" == "" ]; then + echo "SRP_SCRIPTS not defined" + exit 2 + fi + export OBSERVATION=package-bundle + export OUTPUT_VERSION=$SERVER_VERSION + $SRP_SCRIPTS/run-under-observer.sh "${{ github.action_path }}/build-package-bundle.sh" + else + "${{ github.action_path }}/build-package-bundle.sh" + fi diff --git a/.github/actions/build-package-bundle/build-package-bundle.sh b/.github/actions/build-package-bundle/build-package-bundle.sh new file mode 100755 index 0000000000..be92cb92ab --- /dev/null +++ b/.github/actions/build-package-bundle/build-package-bundle.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash + +function check_env() { + eval ev='$'$1 + if [ "$ev" == "" ]; then + echo "env var $1 not defined" + if ((sourced != 0)); then + return 1 + else + exit 1 + fi + fi +} + +TMP=$(mktemp -d) +if [ "$PACKAGE_BUNDLE_GENERATED" = "" ]; then + export PACKAGE_BUNDLE_GENERATED="$TMP/generated/packagebundle" +fi +mkdir -p "$PACKAGE_BUNDLE_GENERATED" +if [ "$IMGPKG_LOCK_GENERATED_IN" = "" ]; then + export IMGPKG_LOCK_GENERATED_IN="$TMP/generated/imgpkgin" +fi +mkdir -p "$IMGPKG_LOCK_GENERATED_IN" +if [ "$IMGPKG_LOCK_GENERATED_OUT" = "" ]; then + export IMGPKG_LOCK_GENERATED_OUT="$TMP/generated/imgpkgout" +fi +mkdir -p "$IMGPKG_LOCK_GENERATED_OUT" + +check_env PACKAGE_BUNDLE_TEMPLATE +check_env SERVER_VERSION +check_env SERVER_REPOSITORY +check_env DATAFLOW_VERSION +check_env SKIPPER_VERSION +check_env SKIPPER_REPOSITORY +check_env PACKAGE_NAME +check_env IMGPKG_LOCK_TEMPLATE +check_env VENDIR_SRC_IN + +echo "Build Package Bundle: $PACKAGE_BUNDLE_TEMPLATE package.name=$PACKAGE_NAME, server.repository=$SERVER_REPOSITORY, server.version=$SERVER_VERSION,skipper.repository=$SKIPPER_REPOSITORY, skipper.version=$SKIPPER_VERSION, output=$PACKAGE_BUNDLE_GENERATED" +set +e +time ls > /dev/null 2>&1 +RC=$? 
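+# if the probe above found a usable time command, wrap each tool invocation below with it to append per-step timings to times.txt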
+if ((RC == 0)); then
+    MEASURE="time -v -o times.txt -a"
+else
+    MEASURE=""
+fi
+set -e
+echo "ytt -f $PACKAGE_BUNDLE_TEMPLATE" > times.txt
+
+$MEASURE ytt -f "$PACKAGE_BUNDLE_TEMPLATE" \
+    --output-files "$PACKAGE_BUNDLE_GENERATED" \
+    --data-value-yaml server.version="$SERVER_VERSION" \
+    --data-value-yaml server.repository="$SERVER_REPOSITORY" \
+    --data-value-yaml ctr.version="$DATAFLOW_VERSION" \
+    --data-value-yaml dataflow.version="$DATAFLOW_VERSION" \
+    --data-value-yaml skipper.version="$SKIPPER_VERSION" \
+    --data-value-yaml skipper.repository="$SKIPPER_REPOSITORY" \
+    --data-value-yaml grafana.version="$DATAFLOW_VERSION" \
+    --data-value-yaml package.name="$PACKAGE_NAME" \
+    --file-mark 'config/values.yml:type=text-template' \
+    --file-mark '.imgpkg/bundle.yaml:type=text-template'
+echo "ytt -f $IMGPKG_LOCK_TEMPLATE" >> times.txt
+$MEASURE ytt -f "$IMGPKG_LOCK_TEMPLATE" \
+    --output-files "$IMGPKG_LOCK_GENERATED_IN" \
+    --data-value-yaml server.version="$SERVER_VERSION" \
+    --data-value-yaml server.repository="$SERVER_REPOSITORY" \
+    --data-value-yaml ctr.version="$DATAFLOW_VERSION" \
+    --data-value-yaml dataflow.version="$DATAFLOW_VERSION" \
+    --data-value-yaml skipper.version="$SKIPPER_VERSION" \
+    --data-value-yaml skipper.repository="$SKIPPER_REPOSITORY" \
+    --data-value-yaml grafana.version="$DATAFLOW_VERSION" \
+    --file-mark '**/*.yml:type=text-template'
+
+mkdir -p "$PACKAGE_BUNDLE_GENERATED/config/upstream"
+cp -R "$VENDIR_SRC_IN" "$PACKAGE_BUNDLE_GENERATED/config/upstream"
+echo "vendir sync --chdir $PACKAGE_BUNDLE_GENERATED" >> times.txt
+$MEASURE vendir sync --chdir "$PACKAGE_BUNDLE_GENERATED"
+mkdir -p "$IMGPKG_LOCK_GENERATED_OUT"
+
+for DIR in $(ls $IMGPKG_LOCK_GENERATED_IN); do
+    echo "ytt for $DIR" >> times.txt
+    $MEASURE ytt -f "$PACKAGE_BUNDLE_GENERATED" -f "$IMGPKG_LOCK_GENERATED_IN/$DIR" > "$IMGPKG_LOCK_GENERATED_OUT/$DIR.yml"
+done
+
+mkdir -p "$PACKAGE_BUNDLE_GENERATED/.imgpkg"
+echo "kbld -f $IMGPKG_LOCK_GENERATED_OUT" >> times.txt
+$MEASURE kbld -f "$IMGPKG_LOCK_GENERATED_OUT" \
+    --imgpkg-lock-output "$PACKAGE_BUNDLE_GENERATED/.imgpkg/images.yml"
+
+cat times.txt
\ No newline at end of file
diff --git a/.github/actions/build-repository-bundle/action.yml b/.github/actions/build-repository-bundle/action.yml
index 8cf91d4586..7b48b64689 100644
--- a/.github/actions/build-repository-bundle/action.yml
+++ b/.github/actions/build-repository-bundle/action.yml
@@ -10,6 +10,18 @@ inputs:
   package-bundle-repository:
     description: repository for package bundles
     required: true
+  package-name:
+    description: package name
+    required: true
+  project-directory:
+    description: The working directory
+    required: true
+  SRP_CLIENT_ID:
+    description: secrets.SRP_CLIENT_ID
+    required: false
+  SRP_CLIENT_SECRET:
+    description: secrets.SRP_CLIENT_SECRET
+    required: false
 outputs:
   bundle-path:
     description: Location path where bundle was build
@@ -18,38 +30,30 @@ outputs:
 runs:
   using: "composite"
   steps:
-    - uses: vmware-tanzu/carvel-setup-action@v1
     - name: Tanzu Dance
       id: tanzu-dance
      env:
-        VERSION: ${{ inputs.version }}
+        PACKAGE_VERSION: ${{ inputs.version }}
         PACKAGE_BUNDLE_REPOSITORY: ${{ inputs.package-bundle-repository }}
+        PACKAGE_NAME: ${{ inputs.package-name }}
         REPO_BUNDLE_TEMPLATE: ${{ inputs.repo-bundle-template }}
         REPO_BUNDLE_RENDERED: ${{ runner.temp }}/generated/reporendered
         REPO_BUNDLE_GENERATED: ${{ runner.temp }}/generated/repobundle
+        SRP_CLIENT_ID: ${{ inputs.SRP_CLIENT_ID }}
+        SRP_CLIENT_SECRET: ${{ inputs.SRP_CLIENT_SECRET }}
       shell: bash
+      working-directory: ${{
inputs.project-directory }} run: | - echo "::set-output name=bundle-path::$REPO_BUNDLE_GENERATED" - - mkdir -p $REPO_BUNDLE_GENERATED/packages - mkdir -p $REPO_BUNDLE_GENERATED/.imgpkg - ytt \ - -f $REPO_BUNDLE_TEMPLATE \ - --output-files $REPO_BUNDLE_RENDERED \ - --data-value-yaml project.version=$VERSION \ - --data-value-yaml repository=$PACKAGE_BUNDLE_REPOSITORY \ - --file-mark 'package.yml:type=text-plain' \ - --file-mark 'metadata.yml:type=text-plain' \ - --file-mark 'values-schema.yml:type=text-plain' \ - --file-mark 'values-schema.star:type=text-plain' \ - --file-mark 'values-schema.star:for-output=true' \ - --file-mark 'versions.yml:type=text-template' - - ytt \ - -f $REPO_BUNDLE_RENDERED \ - --file-mark 'values-schema.yml:type=data' \ - > $REPO_BUNDLE_GENERATED/packages/packages.yml - - kbld \ - --file $REPO_BUNDLE_GENERATED/packages \ - --imgpkg-lock-output $REPO_BUNDLE_GENERATED/.imgpkg/images.yml + echo "bundle-path=$REPO_BUNDLE_GENERATED" >> $GITHUB_OUTPUT + SCDF_DIR="${{ inputs.project-directory || '.' }}" + if [ "$USE_SRP" == "true" ]; then + if [ "$SRP_SCRIPTS" == "" ]; then + echo "SRP_SCRIPTS not defined" + exit 2 + fi + export OBSERVATION=repository-bundle + export OUTPUT_VERSION=$PACKAGE_VERSION + $SRP_SCRIPTS/run-under-observer.sh "${{ github.action_path }}/build-repository-bundle.sh" + else + "${{ github.action_path }}/build-repository-bundle.sh" + fi diff --git a/.github/actions/build-repository-bundle/build-repository-bundle.sh b/.github/actions/build-repository-bundle/build-repository-bundle.sh new file mode 100644 index 0000000000..b090ba5479 --- /dev/null +++ b/.github/actions/build-repository-bundle/build-repository-bundle.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +function check_env() { + eval ev='$'$1 + if [ "$ev" == "" ]; then + echo "env var $1 not defined" + if ((sourced != 0)); then + return 1 + else + exit 1 + fi + fi +} + +TMP=$(mktemp -d) +if [ "$REPO_BUNDLE_GENERATED" = "" ]; then + export REPO_BUNDLE_GENERATED="$TMP/generated/repobundle" +fi +mkdir -p $REPO_BUNDLE_GENERATED/packages +mkdir -p $REPO_BUNDLE_GENERATED/.imgpkg + +if [ "$REPO_BUNDLE_RENDERED" = "" ]; then + export REPO_BUNDLE_RENDERED="$TMP/generated/reporendered" +fi +mkdir -p "$REPO_BUNDLE_RENDERED" + +check_env REPO_BUNDLE_TEMPLATE +check_env REPO_BUNDLE_RENDERED +check_env PACKAGE_VERSION +check_env PACKAGE_BUNDLE_REPOSITORY +check_env PACKAGE_NAME + +echo "Build Repository Bundle: $REPO_BUNDLE_TEMPLATE, project.version=$PACKAGE_VERSION, package.name=$PACKAGE_NAME, repository=$PACKAGE_BUNDLE_REPOSITORY, output=$REPO_BUNDLE_RENDERED" + +set -e + +ytt \ + -f $REPO_BUNDLE_TEMPLATE \ + --output-files $REPO_BUNDLE_RENDERED \ + --data-value-yaml project.version=$PACKAGE_VERSION \ + --data-value-yaml repository=$PACKAGE_BUNDLE_REPOSITORY \ + --data-value-yaml package.name=$PACKAGE_NAME \ + --data-value-yaml package.timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \ + --file-mark 'package.yml:type=text-plain' \ + --file-mark 'metadata.yml:type=text-plain' \ + --file-mark 'values-schema.yml:type=text-plain' \ + --file-mark 'values-schema.star:type=text-plain' \ + --file-mark 'values-schema.star:for-output=true' \ + --file-mark 'versions.yml:type=text-template' + +ytt \ + -f $REPO_BUNDLE_RENDERED \ + --file-mark 'values-schema.yml:type=data' \ + > $REPO_BUNDLE_GENERATED/packages/packages.yml + +kbld \ + --file $REPO_BUNDLE_GENERATED/packages \ + --imgpkg-lock-output $REPO_BUNDLE_GENERATED/.imgpkg/images.yml diff --git a/.github/actions/install-xmlutils/action.yml 
b/.github/actions/install-xmlutils/action.yml new file mode 100644 index 0000000000..14fc435087 --- /dev/null +++ b/.github/actions/install-xmlutils/action.yml @@ -0,0 +1,11 @@ +name: 'Install xsltproc' +description: 'Install xsltproc' + +runs: + using: composite + steps: + - name: 'Install xmlutils' + shell: bash + run: | + sudo apt-get update -q -y + sudo apt-get install -q -y xsltproc libxml2-utils diff --git a/.github/actions/publish-bundle/action.yml b/.github/actions/publish-bundle/action.yml index b4337763dc..b150fafa0d 100644 --- a/.github/actions/publish-bundle/action.yml +++ b/.github/actions/publish-bundle/action.yml @@ -7,17 +7,21 @@ inputs: repository: description: repository to publish required: true + project-directory: + description: The working directory + required: true version: description: semver version required: true - prerelease: - description: semver version prerelease + SRP_CLIENT_ID: + description: secrets.SRP_CLIENT_ID + required: false + SRP_CLIENT_SECRET: + description: secrets.SRP_CLIENT_SECRET required: false - runs: using: "composite" steps: - - uses: vmware-tanzu/carvel-setup-action@v1 - name: Setup Envs shell: bash run: | @@ -28,13 +32,23 @@ runs: BUNDLE_PATH: ${{ inputs.path }} REPOSITORY: ${{ inputs.repository }} VERSION: ${{ inputs.version }} - PRERELEASE: ${{ inputs.prerelease }} + SRP_CLIENT_ID: ${{ inputs.SRP_CLIENT_ID }} + SRP_CLIENT_SECRET: ${{ inputs.SRP_CLIENT_SECRET }} shell: bash + working-directory: ${{ inputs.project-directory }} run: | - imgpkg push --bundle $REPOSITORY:$VERSION-RANDOM.$RTAG --file $BUNDLE_PATH - docker pull $REPOSITORY:$VERSION-RANDOM.$RTAG - - POSTFIX=$VERSION${PRERELEASE:+"-$PRERELEASE"} - - docker tag $REPOSITORY:$VERSION-RANDOM.$RTAG $REPOSITORY:$POSTFIX - docker push $REPOSITORY:$POSTFIX + if [ "$USE_SRP" == "true" ]; then + if [ "$SRP_SCRIPTS" == "" ]; then + echo "SRP_SCRIPTS not defined" + exit 2 + fi + if [[ "$REPOSITORY" == *"-package"* ]]; then + export OBSERVATION=package-publish + else + export OBSERVATION=repository-publish + fi + export OUTPUT_VERSION=$VERSION + $SRP_SCRIPTS/run-under-observer.sh "${{ github.action_path }}/publish-bundle.sh" + else + "${{ github.action_path }}/publish-bundle.sh" + fi diff --git a/.github/actions/publish-bundle/publish-bundle.sh b/.github/actions/publish-bundle/publish-bundle.sh new file mode 100644 index 0000000000..a6613d697e --- /dev/null +++ b/.github/actions/publish-bundle/publish-bundle.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -e +IMG_PKG_OPT= +if [ "$USE_SRP" == "true" ]; then + IMG_PKG_OPT="--debug" + if [ "$SSL_CERT_FILE" != "" ] && [ -f "$SSL_CERT_FILE" ]; then + IMG_PKG_OPT="$IMG_PKG_OPT --registry-ca-cert-path $SSL_CERT_FILE" + else + IMG_PKG_OPT="$IMG_PKG_OPT --registry-verify-certs=false" + fi +fi +if [ "$IMG_PKG_OPT" != "" ]; then + echo "IMG_PKG_OPT=$IMG_PKG_OPT" +fi +set +e +IMAGE_ID=$(docker images --digests --format json | jq -r --arg IMAGE_URL "$REPOSITORY" --arg TAG "$VERSION" 'select(.Repository == $IMAGE_URL and .Tag == $TAG)' | jq -r --slurp 'map({ID: .ID}) | unique | .[] | .ID') +if [ "$IMAGE_ID" != "" ]; then + echo "Removing all images with ID=$IMAGE_ID for $REPOSITORY" + docker images --digests | grep -F "$IMAGE_ID" + docker rmi --force $IMAGE_ID +fi +set -e +imgpkg push $IMG_PKG_OPT --bundle "$REPOSITORY:$VERSION" --file "$BUNDLE_PATH" diff --git a/.github/labels-manage.yml b/.github/labels-manage.yml deleted file mode 100644 index 574e9bc96f..0000000000 --- a/.github/labels-manage.yml +++ /dev/null @@ -1,206 +0,0 @@ -- name: area/batch-task - 
color: F9D0C4 - description: Belongs to batch and task -- name: area/carvel - color: F9D0C4 - description: Belongs to carvel app features -- name: area/composed-tasks - color: F9D0C4 - description: Belongs to ctr -- name: area/dependencies - color: F9D0C4 - description: Belongs project dependencies -- name: area/docker - color: F9D0C4 - description: Belongs to docker -- name: area/dsl - color: F9D0C4 - description: Belongs to dsl -- name: area/documentation - color: F9D0C4 - description: Belongs to documentation -- name: area/fan-in-fan-out - color: F9D0C4 - description: Belongs Fan -- name: area/flo-scdf-integration - color: F9D0C4 - description: Belongs to Flo -- name: area/helm-charts - color: F9D0C4 - description: Belongs to helm -- name: area/micrometer - color: F9D0C4 - description: Belongs to micrometer -- name: area/performance-optimization - color: F9D0C4 - description: Belongs to performance -- name: area/security - color: F9D0C4 - description: Belongs to security -- name: area/skipper - color: F9D0C4 - description: Belongs to skipper -- name: area/task-orchestration - color: F9D0C4 - description: Belongs to task orchestration -- name: area/task-scheduler - color: F9D0C4 - description: Belongs to task scheduling -- name: area/tests - color: F9D0C4 - description: Belongs to tests - -- name: automation/rlnotes-header - color: EDEDED - description: Belongs to release notes automation -- name: automation/rlnotes-footer - color: EDEDED - description: Belongs to release notes automation - -- name: for/angular4-upgrade - color: E99695 - description: For Angular 4 update -- name: for/backport - color: E99695 - description: For backporting -- name: for/blocker - color: E99695 - description: For blocking -- name: for/composed-tasks - color: E99695 - description: For Composed Tasks -- name: for/fan-in-fan-out - color: E99695 - description: For Fan -- name: for/flo-scdf-integration - color: E99695 - description: For Flow integration -- name: for/marketing - color: E99695 - description: For marketing -- name: for/spike - color: E99695 - description: For spike -- name: for/team-attention - color: E99695 - description: For team attention -- name: for/ux-improvement - color: E99695 - description: For UX improvement - -- name: status/complete - color: FEF2C0 - description: Issue is now complete -- name: status/declined - color: FEF2C0 - description: Issue has been declined -- name: status/duplicate - color: FEF2C0 - description: There were an existing issue -- name: status/in-progress - color: FEF2C0 - description: Something is happening -- name: status/invalid - color: FEF2C0 - description: Mistake, bogus, old, bye bye -- name: status/need-design - color: FEF2C0 - description: Vague so need some proper design -- name: status/need-feedback - color: FEF2C0 - description: Calling participant to provide feedback -- name: status/need-investigation - color: FEF2C0 - description: Oh need to look under a hood -- name: status/need-triage - color: FEF2C0 - description: Team needs to triage and take a first look -- name: status/on-hold - color: FEF2C0 - description: For various reasons is on hold -- name: status/stale - color: FEF2C0 - description: Marked as stale -- name: status/closed-as-stale - color: FEF2C0 - description: Closed as has been stale - -- name: type/automated-pr - color: D4C5F9 - description: Is an automated pr -- name: type/backport - color: D4C5F9 - description: Is a issue to track backport, use with branch/xxx -- name: type/bug - color: D4C5F9 - description: Is a bug report -- name: 
type/enhancement
-  color: D4C5F9
-  description: Is an enhancement request
-- name: type/epic
-  color: D4C5F9
-  description: Collection of issues
-- name: type/feature
-  color: D4C5F9
-  description: Is a feature request
-- name: type/help-needed
-  color: D4C5F9
-  description: Calling help
-- name: type/idea
-  color: D4C5F9
-  description: Is just an idea
-- name: type/task
-  color: D4C5F9
-  description: Something needs to get done
-- name: type/technical-debt
-  color: D4C5F9
-  description: Techical Dept
-- name: type/question
-  color: D4C5F9
-  description: Is a question
-
-- name: branch/1.2.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/1.3.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/1.4.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/1.5.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/1.6.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/1.7.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.0.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.1.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.2.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.3.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.4.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.5.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.6.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.7.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.8.x
-  color: BFDADC
-  description: Issue for a branch
diff --git a/.github/rlnotes.mustache b/.github/rlnotes.mustache
index 5bb3978bd0..4c59c73f18 100644
--- a/.github/rlnotes.mustache
+++ b/.github/rlnotes.mustache
@@ -11,13 +11,7 @@
 # Dependent Projects and Compatibility
 Component | Version
 --- | ---
-{{projects.spring_cloud_dataflow_build.name}}|{{projects.spring_cloud_dataflow_build.version}}
-{{projects.spring_cloud_dataflow_common.name}}|{{projects.spring_cloud_dataflow_common.version}}
 {{projects.spring_cloud_deployer.name}}|{{projects.spring_cloud_deployer.version}}
-{{projects.spring_cloud_deployer_local.name}}|{{projects.spring_cloud_deployer_local.version}}
-{{projects.spring_cloud_deployer_cloudfoundry.name}}|{{projects.spring_cloud_deployer_cloudfoundry.version}}
-{{projects.spring_cloud_deployer_kubernetes.name}}|{{projects.spring_cloud_deployer_kubernetes.version}}
-{{projects.spring_cloud_common_security_config.name}}|{{projects.spring_cloud_common_security_config.version}}
 {{projects.spring_cloud_skipper.name}}|{{projects.spring_cloud_skipper.version}}
 {{projects.spring_cloud_dataflow_ui.name}}|{{projects.spring_cloud_dataflow_ui.version}}
 {{projects.spring_cloud_dataflow.name}}|{{projects.spring_cloud_dataflow.version}}
@@ -25,7 +19,7 @@ Component | Version
 # Issues
 {{#issues}}
-{{repo}}#{{number}} {{title}}
+* {{repo}}#{{number}} {{title}}
 {{/issues}}
 
 {{#footerslength}}
diff --git a/.github/settings.xml b/.github/settings.xml
index b9cb56112a..a36e075d4b 100644
--- a/.github/settings.xml
+++ b/.github/settings.xml
@@ -1,34 +1,100 @@
 <settings>
 	<profiles>
 		<profile>
-			<id>stagingmilestone</id>
+			<id>pr</id>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
 			<repositories>
 				<repository>
-					<id>spring-staging</id>
-					<name>Spring Staging</name>
-					<url>https://repo.spring.io/libs-staging-local</url>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
 					<snapshots>
 						<enabled>false</enabled>
 					</snapshots>
 				</repository>
+				<repository>
+					<id>spring-snapshots</id>
+					<name>Spring Snapshots</name>
+					<url>https://repo.spring.io/snapshot</url>
+					<snapshots>
+						<enabled>true</enabled>
+					</snapshots>
+				</repository>
+				<repository>
+					<id>spring-milestones</id>
+					<name>Spring Milestones</name>
+					<url>https://repo.spring.io/milestone</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</repository>
+			</repositories>
+			<pluginRepositories>
+				<pluginRepository>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
+					<id>spring-snapshots</id>
+					<name>Spring Snapshots</name>
+					<url>https://repo.spring.io/snapshot</url>
+					<snapshots>
+						<enabled>true</enabled>
+					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
 					<id>spring-milestones</id>
 					<name>Spring Milestones</name>
-					<url>https://repo.spring.io/libs-milestone</url>
+					<url>https://repo.spring.io/milestone</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+			</pluginRepositories>
+		</profile>
+		<profile>
+			<id>stagingmilestone</id>
+			<repositories>
+				<repository>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
 					<snapshots>
 						<enabled>false</enabled>
 					</snapshots>
 				</repository>
 				<repository>
-					<id>spring-releases</id>
-					<name>Spring Releases</name>
-					<url>https://repo.spring.io/libs-release</url>
+					<id>spring-staging</id>
+					<name>Spring Staging</name>
+					<url>https://repo.spring.io/staging</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</repository>
+				<repository>
+					<id>spring-milestones</id>
+					<name>Spring Milestones</name>
+					<url>https://repo.spring.io/milestone</url>
 					<snapshots>
 						<enabled>false</enabled>
 					</snapshots>
 				</repository>
+			</repositories>
+			<pluginRepositories>
+				<pluginRepository>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
 					<id>spring-staging</id>
 					<name>Spring Staging</name>
@@ -37,33 +103,49 @@
 						<enabled>false</enabled>
 					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
+					<id>spring-milestones</id>
+					<name>Spring Milestones</name>
+					<url>https://repo.spring.io/milestone</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+			</pluginRepositories>
+		</profile>
 		<profile>
 			<id>stagingrelease</id>
 			<repositories>
 				<repository>
-					<id>spring-staging</id>
-					<name>Spring Staging</name>
-					<url>https://repo.spring.io/libs-staging-local</url>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
 					<snapshots>
 						<enabled>false</enabled>
 					</snapshots>
 				</repository>
 				<repository>
-					<id>spring-releases</id>
-					<name>Spring Releases</name>
-					<url>https://repo.spring.io/libs-release</url>
+					<id>spring-staging</id>
+					<name>Spring Staging</name>
+					<url>https://repo.spring.io/staging</url>
 					<snapshots>
 						<enabled>false</enabled>
 					</snapshots>
 				</repository>
+			</repositories>
+			<pluginRepositories>
+				<pluginRepository>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
 					<id>spring-staging</id>
 					<name>Spring Staging</name>
-					<url>https://repo.spring.io/libs-staging-local</url>
+					<url>https://repo.spring.io/staging</url>
 					<snapshots>
 						<enabled>false</enabled>
 					</snapshots>
+				</pluginRepository>
+			</pluginRepositories>
 		</profile>
 	</profiles>
 </settings>
diff --git a/.github/workflows/build-image.sh b/.github/workflows/build-image.sh
new file mode 100755
index 0000000000..72670f255c
--- /dev/null
+++ b/.github/workflows/build-image.sh
@@ -0,0 +1,101 @@
+#!/usr/bin/env bash
+if [ -z "$BASH_VERSION" ]; then
+    echo "This script requires Bash. Use: bash $0 $*"
+    exit 1
+fi
+SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+set +e
+if [ "$PUSH" == "" ]; then
+    PUSH=true
+fi
+if [ "$TAG" == "" ]; then
+    echo "TAG not found"
+    exit 1
+fi
+if [ "$DEFAULT_JDK" = "" ]; then
+    echo "DEFAULT_JDK not found using 11"
+    DEFAULT_JDK=11
+else
+    echo "DEFAULT_JDK=$DEFAULT_JDK"
+fi
+
+function download_image() {
+    TARGET=$1
+    ARTIFACT_ID=$2
+    VERSION=$3
+    TARGET_FILE=$TARGET/$ARTIFACT_ID-$VERSION.jar
+    pushd $SCDIR/download-jar > /dev/null || exit
+    ./gradlew downloadJar -PartifactId=$ARTIFACT_ID -PartifactVersion=$VERSION -PartifactPath=$TARGET
+    RC=$?
+    if((RC != 0)); then
+        exit $RC
+    fi
+    popd > /dev/null || exit
+    if [ ! -f $TARGET_FILE ]; then
+        echo "Cannot find $TARGET_FILE"
+        ls -al $TARGET
+        exit 2
+    fi
+    echo "Downloaded $TARGET_FILE"
+}
+
+TARGET=$(realpath $1)
+REPO="$2"
+ARTIFACT_ID=$3
+
+if [ "$ARTIFACT_ID" = "" ]; then
+    echo "Usage: $0 <target-dir> <repository> <artifact-id>"
+    exit 1
+fi
+JAR="$TARGET/$ARTIFACT_ID-$TAG.jar"
+if [ ! -f "$JAR" ]; then
+    echo "$JAR not found downloading"
+    download_image "$TARGET" "$ARTIFACT_ID" "$TAG"
+    RC=$?
+    if((RC != 0)); then
+        exit $RC
+    fi
+fi
+for v in 8 11 17; do
+    echo "Creating: $REPO:$TAG-jdk$v"
+    pack build --builder gcr.io/paketo-buildpacks/builder:base \
+        --path "$JAR" \
+        --trust-builder --verbose \
+        --env BP_JVM_VERSION=$v "$REPO:$TAG-jdk$v"
+    RC=$?
+    if((RC != 0)); then
+        exit $RC
+    fi
+    echo "Created: $REPO:$TAG-jdk$v"
+    if [ "$PUSH" == "true" ]; then
+        if [ "$DELETE_TAGS" == "true" ]; then
+            $SCDIR/docker-rm-tag.sh $REPO $TAG-jdk$v
+        fi
+        docker push "$REPO:$TAG-jdk$v"
+        RC=$?
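+        # a failed push aborts the remaining JDK variants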
+        if ((RC!=0)); then
+            exit $RC
+        fi
+        echo "Pushed $REPO:$TAG-jdk$v"
+    else
+        echo "Skipped push $REPO:$TAG-jdk$v"
+    fi
+
+    if [ "$DEFAULT_JDK" == "$v" ]; then
+        docker tag "$REPO:$TAG-jdk$DEFAULT_JDK" "$REPO:$TAG"
+        if [ "$PUSH" == "true" ]; then
+            if [ "$DELETE_TAGS" == "true" ]; then
+                $SCDIR/docker-rm-tag.sh $REPO $TAG
+            fi
+            docker push "$REPO:$TAG"
+            echo "Pushed $REPO:$TAG"
+        else
+            echo "Skipped push $REPO:$TAG"
+        fi
+    fi
+done
+#if [ "$PUSH" == "true" ]; then
+#    echo "Pruning Docker"
+#    docker system prune -f
+#    docker system prune --volumes -f
+#fi
+
diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
new file mode 100644
index 0000000000..d36b174122
--- /dev/null
+++ b/.github/workflows/build-images.yml
@@ -0,0 +1,117 @@
+name: build-images
+
+on:
+  workflow_call:
+    inputs:
+      version:
+        type: string
+        description: 'Version'
+        required: false
+      delete-tags:
+        type: boolean
+        default: false
+        description: 'Delete the image tags'
+
+    secrets:
+      DOCKERHUB_USERNAME:
+      DOCKERHUB_TOKEN:
+      GCR_JSON_KEY:
+      ARTIFACTORY_USERNAME:
+      ARTIFACTORY_PASSWORD:
+
+jobs:
+  # test templating before publishing a package
+  prepare:
+    name: Prepare Job
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v3
+        with:
+          java-version: '8'
+          distribution: 'liberica'
+      - name: Load matrix
+        id: matrix
+        shell: bash
+        run: |
+          echo "Input version: ${{ inputs.version }}"
+          if [ "${{ inputs.version }}" == "" ]; then
+            ./mvnw help:evaluate -Dexpression=project.version -q
+            echo "VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout)" >> $GITHUB_ENV
+          else
+            echo "VERSION=${{ inputs.version }}" >> $GITHUB_ENV
+          fi
+          MATRIX=$(cat .github/workflows/images.json | jq -c)
+          echo "MATRIX=$MATRIX"
+          echo "MATRIX=$MATRIX" >> $GITHUB_ENV
+    outputs:
+      matrix: ${{ env.MATRIX }}
+      version: ${{ env.VERSION }}
+  publish:
+    name: Publish
+    runs-on: ubuntu-latest
+    needs:
+      - prepare
+    strategy:
+      matrix: ${{ fromJson(needs.prepare.outputs.matrix) }}
+    concurrency:
+      group: ${{ matrix.name }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v3
+        with:
+          java-version: '8'
+          distribution: 'liberica'
+      - name: Install pack
+        uses: jvalkeal/build-zoo-handler@v0.0.4
+        with:
+          pack-version: 0.30.0
+      - name: Login to docker.io
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GCR
+        uses: docker/login-action@v3
+        with:
+          registry: gcr.io
+          username: _json_key
+          password: ${{ secrets.GCR_JSON_KEY }}
+      - name: Build and Publish ${{ matrix.name }}
+        shell: bash
+        env:
+          TAG: ${{ needs.prepare.outputs.version }}
+          DEFAULT_JDK: '11'
+          ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }}
+          ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }}
+          DELETE_TAGS: ${{ inputs.delete-tags }}
+        run: |
+          .github/workflows/build-image.sh ${{ matrix.path }} ${{ matrix.image }} ${{ matrix.name }}
+  build-extra:
+    name: Build extra images
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Login to docker.io
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Publish Grafana Prometheus
+        uses: docker/build-push-action@v2
+        with:
+          context: src/grafana/prometheus/docker/grafana
+          push: true
+          tags: springcloud/spring-cloud-dataflow-grafana-prometheus:${{ inputs.version }}
+      - name: Publish Grafana InfluxDB
+        uses:
docker/build-push-action@v2 + with: + context: src/grafana/influxdb/docker/grafana + push: true + tags: springcloud/spring-cloud-dataflow-grafana-influxdb:${{ inputs.version }} + - name: Publish Prometheus Local + uses: docker/build-push-action@v2 + with: + context: src/grafana/prometheus/docker/prometheus-local + push: true + tags: springcloud/spring-cloud-dataflow-prometheus-local:${{ inputs.version }} diff --git a/.github/workflows/build-snapshot-controller.yml b/.github/workflows/build-snapshot-controller.yml index 217c2cc98d..a543c91d5d 100644 --- a/.github/workflows/build-snapshot-controller.yml +++ b/.github/workflows/build-snapshot-controller.yml @@ -26,30 +26,10 @@ jobs: { "if": "initial == true", "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-build", - "ref": "main", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-build' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-common", - "ref": "main", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-common' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-deployer", - "ref": "main", + "ref": "2.9.x", "workflow": "build-snapshot-worker.yml" } }, @@ -58,69 +38,29 @@ jobs: "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", - "repo": "spring-cloud-deployer-local", - "ref": "main", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-local' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-cloudfoundry", - "ref": "main", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-cloudfoundry' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-kubernetes", - "ref": "main", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-kubernetes' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-common-security-config", - "ref": "main", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-common-security-config' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-skipper", - "ref": "main", + "repo": "spring-cloud-dataflow-ui", + "ref": "3.4.x", "workflow": "build-snapshot-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-skipper' && data.owner == 'spring-cloud'", + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-ui", - "ref": "main", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", "workflow": 
"build-snapshot-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow", - "ref": "main", - "workflow": "build-snapshot-worker.yml" + "ref": "2.11.x", + "workflow": "carvel-worker.yml" } }, { @@ -129,5 +69,12 @@ jobs: "fail": { "message": "hi, something went wrong" } + }, + { + "if": "data.event == 'carvel-failed'", + "action": "fail", + "fail": { + "message": "hi, something went wrong with carvel" + } } ] diff --git a/.github/workflows/build-snapshot-worker.yml b/.github/workflows/build-snapshot-worker.yml index c0be7561c7..a8604de2f5 100644 --- a/.github/workflows/build-snapshot-worker.yml +++ b/.github/workflows/build-snapshot-worker.yml @@ -8,93 +8,205 @@ on: description: 'Build Zoo Handler Payload' required: true +env: + MAVEN_THREADS: '-T 1' + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: '8' + distribution: 'liberica' - uses: jvalkeal/setup-maven@v1 with: - maven-version: 3.6.3 - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - - name: Install pack - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - pack-version: 0.18.0 - - # cache maven .m2 - - uses: actions/cache@v2 + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-m2- - - # target deploy repos + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Configure JFrog Cli run: | jfrog rt mvnc \ - --server-id-deploy=repo.spring.io \ - --repo-deploy-releases=release \ - --repo-deploy-snapshots=snapshot - echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main >> $GITHUB_ENV + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x >> $GITHUB_ENV echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - - # zoo extract and ensure - name: Extract Zoo Context Properties uses: jvalkeal/build-zoo-handler@v0.0.4 with: dispatch-handler-extract-context-properties: true - - # build and publish to configured target + - name: 'Install: xmllint' + uses: ./.github/actions/install-xmlutils - name: Build and Publish + shell: bash + timeout-minutes: 75 run: | - jfrog rt mvn clean install \ - -Pfull \ - -U -B + mvn clean + ./spring-cloud-dataflow-package/set-package-version.sh + jfrog rt mvn install -Pfull,docs -B + jfrog rt mvn install -pl spring-cloud-dataflow-package -B jfrog rt build-publish - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV - echo 
BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main >> $GITHUB_ENV + PROJECT_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$PROJECT_VERSION >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-2-11-x >> $GITHUB_ENV echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - - # build and publish images via composite action - - name: Build and Publish Images - uses: ./.github/actions/build-images + echo BUILD_ZOO_HANDLER_spring_cloud_skipper_version=$(mvn help:evaluate -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout) >> $GITHUB_ENV +# echo "Determine project version" +# set +e +# echo "::info ::Project version=$PROJECT_VERSION" +# SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-snapshot-local) +# if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then +# echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# else +# echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# fi + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} with: - version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} - dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} - dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }} - - # zoo success - - name: Notify Build Success Zoo Handler Controller - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "build-succeed" - } - - # zoo failure - - name: Notify Build Failure Zoo Handler Controller - if: ${{ failure() }} - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "build-failed", - "message": "spring-cloud-dataflow failed" - } - # clean m2 cache + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed - name: Clean cache run: | find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + outputs: + version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }} + BUILD_ZOO_HANDLER_spring_cloud_skipper_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }} + database-tests: + if: github.repository_owner == 'spring-cloud' + runs-on: ubuntu-latest + strategy: + matrix: + db: [ 'ORACLE', 'DB2' ] + steps: + - uses: actions/checkout@v4 + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2-${{ matrix.db }} + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - 
uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + - uses: ./.github/actions/install-xmlutils + - name: Test + shell: bash + timeout-minutes: 75 + run: | + jfrog rt mvn clean install -s .settings.xml -DskipTests -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server + export ENABLE_${{ matrix.db }}=true + jfrog rt mvn test -s .settings.xml -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -Dgroups=${{ matrix.db }} + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Capture Test Results + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: test-results + path: '**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + # clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + images: + name: Build and Publish Images + needs: [ build ] + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.build.outputs.version }} + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + GCR_JSON_KEY: ${{ secrets.GCR_JSON_KEY }} + + wrap: + needs: [ build, images, database-tests ] + runs-on: ubuntu-latest + steps: + - name: Save env + shell: bash + if: ${{ success() }} + run: | + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_skipper_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }}" >> $GITHUB_ENV +# zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-succeed" + } +# zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-failed", + "message": "spring-cloud-dataflow failed" + } diff --git a/.github/workflows/build-uaa-test.yml b/.github/workflows/build-uaa-test.yml new file mode 100644 index 0000000000..04753a42d7 --- /dev/null +++ b/.github/workflows/build-uaa-test.yml @@ -0,0 +1,41 @@ +name: build-uaa-test + +on: + workflow_dispatch: + +jobs: + # test templating before publishing a package + prepare: + name: Prepare Job + runs-on: ubuntu-latest + 
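+    # the second checkout below pulls the cloudfoundry/uaa sources (ref 4.32.0) into src/docker/uaa/uaa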
steps: + - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + with: + repository: cloudfoundry/uaa + ref: '4.32.0' + path: src/docker/uaa/uaa + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - name: Build UAA Test Image + shell: bash + env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + working-directory: ./src/docker/uaa + run: ./build-uaa + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Publish Test Image + uses: docker/build-push-action@v6 + with: + context: ./src/docker/uaa + push: true + tags: springcloud/scdf-uaa-test:4.32 diff --git a/.github/workflows/carvel-worker.yml b/.github/workflows/carvel-worker.yml new file mode 100644 index 0000000000..86d7defe70 --- /dev/null +++ b/.github/workflows/carvel-worker.yml @@ -0,0 +1,71 @@ +name: Carvel Worker + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +jobs: + prepare: + name: Prepare + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: BUILD_ZOO_HANDLER_spring_cloud_dataflow_version + outputs: + dataflow-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + skipper-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + server-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + ctr-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + publish: + name: Publish + needs: + - prepare + uses: ./.github/workflows/common-carvel.yml + with: + package-name: 'scdf' + package-version: ${{ needs.prepare.outputs.server-version }} + package-bundle: 'springcloud/scdf-oss-package' + repository-bundle: 'springcloud/scdf-oss-repo' + dataflow-version: ${{ needs.prepare.outputs.dataflow-version }} + server-version: ${{ needs.prepare.outputs.server-version }} + ctr-version: ${{ needs.prepare.outputs.dataflow-version }} + skipper-version: ${{ needs.prepare.outputs.skipper-version }} + server-repository: 'springcloud/spring-cloud-dataflow-server' + skipper-repository: 'springcloud/spring-cloud-skipper-server' + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + finalize: + name: Finalize + runs-on: ubuntu-latest + needs: + - publish + steps: + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "carvel-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "carvel-failed" + } diff --git a/.github/workflows/cental-sync.yml b/.github/workflows/cental-sync.yml index d2f2d08efc..aa8ca0ba92 100644 --- a/.github/workflows/cental-sync.yml +++ b/.github/workflows/cental-sync.yml @@ -16,14 +16,13 @@ jobs: steps: # to get spec file in .github - - uses: actions/checkout@v2 + - uses: 
actions/checkout@v4 # Setup jfrog cli - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} # Extract build id from input - name: Extract Build Id diff --git a/.github/workflows/central-release.yml b/.github/workflows/central-release.yml index b6f677c628..a31959454c 100644 --- a/.github/workflows/central-release.yml +++ b/.github/workflows/central-release.yml @@ -9,41 +9,26 @@ on: jobs: central: - runs-on: ubuntu-latest + runs-on: ubuntu22-8-32-OSS environment: central steps: # to get spec file in .github - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 # Setup jfrog cli - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} # zoo extract and ensure - name: Extract Zoo Context Properties uses: jvalkeal/build-zoo-handler@v0.0.4 with: dispatch-handler-extract-context-properties: true ensure-env: | - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildname - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildname - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildnumber BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildname - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_skipper_buildname - BUILD_ZOO_HANDLER_spring_cloud_skipper_buildnumber BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname @@ -52,30 +37,9 @@ jobs: # Download released files - name: Download Release Files run: | - jfrog rt download \ - --spec .github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildnumber" - jfrog rt download \ - --spec .github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildnumber" jfrog rt download \ --spec .github/release-files-spec.json \ --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber" - jfrog rt download \ - --spec .github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildnumber" - jfrog rt download \ - --spec .github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildnumber" - jfrog rt download \ - --spec 
.github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildnumber" - jfrog rt download \ - --spec .github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildnumber" - jfrog rt download \ - --spec .github/release-files-spec.json \ - --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_skipper_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_skipper_buildnumber" jfrog rt download \ --spec .github/release-files-spec.json \ --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber" diff --git a/.github/workflows/ci-carvel.yml b/.github/workflows/ci-carvel.yml index a1b9a6c224..3dd33592ba 100644 --- a/.github/workflows/ci-carvel.yml +++ b/.github/workflows/ci-carvel.yml @@ -3,97 +3,87 @@ name: CI Carvel on: push: branches: - - main + - 2.11.x paths: - 'src/carvel/**' workflow_dispatch: + inputs: + branch: + description: 'Branch or tag to use to determine version numbers' + package_version: + description: 'Package version to publish. If blank will match dataflow version' + required: false jobs: - # test templating before publishing a package - test: - name: Test + prepare: + name: Prepare runs-on: ubuntu-latest - defaults: - run: - shell: bash - working-directory: src/carvel steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch && inputs.branch || github.ref }} + path: 'target' - uses: actions/setup-node@v2 with: - node-version: 12 - - uses: vmware-tanzu/carvel-setup-action@v1 + node-version: 16 + - uses: carvel-dev/setup-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} - name: Setup npm - run: | - npm ci + shell: bash + working-directory: src/carvel + run: npm install + - name: npm ci + shell: bash + working-directory: src/carvel + run: npm ci - name: Lint - run: | - npm run format-check + shell: bash + working-directory: src/carvel + run: npm run format-check - name: Test - run: | - npm test - - publish: - name: Publish - runs-on: ubuntu-latest - needs: test - steps: - - uses: actions/checkout@v2 - - uses: jvalkeal/setup-maven@v1 - with: - maven-version: 3.6.2 + shell: bash + working-directory: src/carvel + run: npm test - uses: jvalkeal/setup-maven@v1 with: - maven-version: 3.6.2 - - name: Login dockerhub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' - name: Configure Env run: | - echo spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV - echo spring_cloud_skipper_version=$(mvn help:evaluate -Dexpression=spring-cloud-skipper.version -q -DforceStdout) >> $GITHUB_ENV - echo spring_cloud_dataflow_version_nopostfix=$(echo '${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}' | mvn build-helper:parse-version help:evaluate -q -DforceStdout) >> $GITHUB_ENV - - # Builds a package bundle - - name: Build Package Bundle - id: build-package-bundle - uses: ./.github/actions/build-package-bundle - with: - dataflow-version: ${{ env.spring_cloud_dataflow_version }} - skipper-version: ${{ 
env.spring_cloud_skipper_version }} - package-bundle-template: src/carvel/templates/bundle/package - imgpkg-lock-template: src/carvel/templates/imgpkg - config: src/carvel/config - - # Publishes scdf package bundle as it needs to be in place - # before repository bundle can be created. - - name: Publish Package Bundle - id: publish-package-bundle - uses: ./.github/actions/publish-bundle - with: - path: ${{ steps.build-package-bundle.outputs.bundle-path }} - repository: springcloud/scdf-package - version: ${{ env.spring_cloud_dataflow_version_nopostfix }} - prerelease: SNAPSHOT - - # Builds a repository bundle - - name: Build Repository Bundle - id: build-repository-bundle - uses: ./.github/actions/build-repository-bundle - with: - version: ${{ env.spring_cloud_dataflow_version }} - repo-bundle-template: src/carvel/templates/bundle/repo - package-bundle-repository: springcloud/scdf-package - - # Publishes scdf repo bundle - - name: Publish Repository Bundle - id: publish-repository-bundle - uses: ./.github/actions/publish-bundle - with: - path: ${{ steps.build-repository-bundle.outputs.bundle-path }} - repository: springcloud/scdf-repo - version: ${{ env.spring_cloud_dataflow_version_nopostfix }} - prerelease: SNAPSHOT + ROOT=$(realpath $PWD) + pushd target + source $ROOT/.github/workflows/export-app-versions.sh + echo DATAFLOW_VERSION=$DATAFLOW_VERSION >> $GITHUB_ENV + echo SKIPPER_VERSION=$SKIPPER_VERSION >> $GITHUB_ENV + if [ "${{ inputs.package_version }}" != "" ]; then + echo PACKAGE_VERSION=${{ inputs.package_version }} >> $GITHUB_ENV + else + echo PACKAGE_VERSION=$DATAFLOW_VERSION >> $GITHUB_ENV + fi + popd + outputs: + dataflow-version: ${{ env.DATAFLOW_VERSION }} + skipper-version: ${{ env.SKIPPER_VERSION }} + package-version: ${{ env.PACKAGE_VERSION }} + publish: + name: Publish + needs: + - prepare + uses: ./.github/workflows/common-carvel.yml + with: + package-name: 'scdf' + package-version: ${{ needs.prepare.outputs.package-version }} + server-repository: 'springcloud/spring-cloud-dataflow-server' + skipper-repository: 'springcloud/spring-cloud-skipper-server' + package-bundle: 'springcloud/scdf-oss-package' + repository-bundle: 'springcloud/scdf-oss-repo' + dataflow-version: ${{ needs.prepare.outputs.dataflow-version }} + server-version: ${{ needs.prepare.outputs.dataflow-version }} + ctr-version: ${{ needs.prepare.outputs.dataflow-version }} + skipper-version: ${{ needs.prepare.outputs.skipper-version }} + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/ci-images.yml b/.github/workflows/ci-images.yml new file mode 100644 index 0000000000..9edc39cade --- /dev/null +++ b/.github/workflows/ci-images.yml @@ -0,0 +1,50 @@ +name: CI - Images + +on: + workflow_dispatch: + inputs: + version: + type: string + description: 'Version to build' + required: false + delete-tags: + required: false + type: boolean + default: false + description: 'Delete the image tags' + +jobs: + prepare: + name: Prepare Build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + if: ${{ inputs.version == null || inputs.version == '' }} + - uses: actions/setup-java@v3 + if: ${{ inputs.version == null || inputs.version == '' }} + with: + java-version: '8' + distribution: 'liberica' + - name: Version from POM + if: ${{ inputs.version == null || inputs.version == '' }} + shell: bash + run: echo "VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout)" >> $GITHUB_ENV + - name: 
Version from Input + if: ${{ inputs.version != null && inputs.version != '' }} + shell: bash + run: echo "VERSION=${{ inputs.version }}" >> $GITHUB_ENV + outputs: + version: ${{ env.VERSION }} + images: + name: Build and Publish Images + needs: [ prepare ] + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.prepare.outputs.version }} + delete-tags: ${{ inputs.delete-tags == 'true' }} + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + GCR_JSON_KEY: ${{ secrets.GCR_JSON_KEY }} + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} \ No newline at end of file diff --git a/.github/workflows/ci-it-db.yml b/.github/workflows/ci-it-db.yml new file mode 100644 index 0000000000..6ac4771060 --- /dev/null +++ b/.github/workflows/ci-it-db.yml @@ -0,0 +1,129 @@ +name: CI IT Database + +on: + workflow_dispatch: + schedule: + - cron: '0 6 * * 1-5' + +jobs: + integration-test: + name: DB IT on ${{ matrix.group }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + group: [mariadb, postgres] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: 'Action: Run Db IT' + env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + run: | + mvn clean install -DskipTests -T 1C -s .settings.xml -pl spring-cloud-dataflow-server -am + mvn \ + -s .settings.xml \ + -pl spring-cloud-dataflow-server \ + -Dgroups=${{ matrix.group }} \ + -Pfailsafe \ + --batch-mode \ + test + - name: Integration Test Report for ${{ matrix.group }} + id: test_report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Integration Tests - ${{ matrix.group }} + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Publish Test Url for ${{ matrix.group }} + shell: bash + run: | + echo "::info ::Test report for ${{ matrix.group }} published at ${{ steps.test_report.outputs.url_html }}" + - name: 'Action: Upload Unit Test Results' + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.group }}-test-results-surefire + path: './**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + - name: 'Action: Upload Integration Test Results' + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.group }}-test-results-failsafe + path: './**/target/failsafe-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + db2-tests: + name: DB2 Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: 'Action: Run DB2 Tests' + env: + ENABLE_DB2: 'true' + run: | + mvn clean install -DskipTests -T 1C -s .settings.xml -pl 
:spring-cloud-dataflow-server,:spring-cloud-skipper-server -am + mvn \ + -s .settings.xml \ + -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server \ + -Dgroups=DB2 \ + --batch-mode \ + test + - name: Test Report for DB2 + id: test_report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Tests - DB2 + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Publish Test Url for DB2 + shell: bash + run: | + echo "::info ::Test report for DB2 published at ${{ steps.test_report.outputs.url_html }}" + - name: 'Action: Upload Unit Test Results' + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v3 + with: + name: DB2-test-results-surefire + path: './**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + completed: + runs-on: ubuntu-latest + needs: [ db2-tests, integration-test ] + steps: + - name: 'Done' + shell: bash + run: echo "::info ::Done" diff --git a/.github/workflows/ci-it-performance.yml b/.github/workflows/ci-it-performance.yml new file mode 100644 index 0000000000..b0edda59af --- /dev/null +++ b/.github/workflows/ci-it-performance.yml @@ -0,0 +1,64 @@ +name: CI IT Performance + +on: + workflow_dispatch: + schedule: + - cron: '0 6 * * 1' + +jobs: + test: + name: Performance IT + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Run Performance IT + env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + run: | + mvn clean install -DskipTests -T 1C -s .settings.xml -pl spring-cloud-dataflow-server -am + mvn \ + -s .settings.xml \ + -pl spring-cloud-dataflow-server \ + -Dgroups=performance \ + -Pfailsafe \ + --batch-mode \ + test + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Integration Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: 'Action: Upload Unit Test Results' + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.group }}-test-results-surefire + path: './**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + - name: 'Action: Upload Integration Test Results' + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.group }}-test-results-failsafe + path: './**/target/failsafe-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore diff --git a/.github/workflows/ci-it-security.yml b/.github/workflows/ci-it-security.yml new file mode 100644 index 0000000000..a675e01c79 --- /dev/null +++ b/.github/workflows/ci-it-security.yml @@ -0,0 +1,56 @@ +name: CI IT Security + +on: + workflow_dispatch: + schedule: + - cron: '0 6 * * 1' + +jobs: + test: + name: Security IT + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ 
secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Run Security IT + env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + run: | + mvn clean install -DskipTests -T 1C -s .settings.xml -pl spring-cloud-dataflow-server -am + mvn \ + -s .settings.xml \ + -pl spring-cloud-dataflow-server \ + -Dgroups=oauth \ + -Pfailsafe \ + --batch-mode \ + test + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Integration Tests + path: '**/surefire-reports/*IT.xml' + reporter: java-junit + list-tests: failed + - name: 'Action: Upload Integration Test Results' + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.group }}-test-results-failsafe + path: './**/target/failsafe-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore diff --git a/.github/workflows/ci-pr.yml b/.github/workflows/ci-pr.yml index d87ef78c82..b145df42af 100644 --- a/.github/workflows/ci-pr.yml +++ b/.github/workflows/ci-pr.yml @@ -3,31 +3,42 @@ name: CI PRs on: pull_request: +env: + TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db,aquasec/trivy-db,ghcr.io/aquasecurity/trivy-db + TRIVY_JAVA_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-java-db,aquasec/trivy-java-db,ghcr.io/aquasecurity/trivy-java-db + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - # cache maven repo - - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-m2- - # jdk8 - - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 with: - java-version: 1.8 - # maven version - - uses: jvalkeal/setup-maven@v1 - with: - maven-version: 3.6.2 - # build + java-version: '8' + distribution: 'liberica' - name: Build + shell: bash + timeout-minutes: 75 run: | - mvn -U -B clean package - # clean m2 cache - - name: Clean cache - run: | - find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + ./mvnw -B -s .github/settings.xml -Pdocs clean install + scan: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run Trivy vulnerability scanner in repo mode + uses: aquasecurity/trivy-action@master + with: + scan-type: 'fs' + ignore-unfixed: true + format: 'table' + severity: 'CRITICAL,HIGH' + - name: 'Scanned' + shell: bash + run: echo "::info ::Scanned" + done: + runs-on: ubuntu-latest + needs: [ scan, build ] + steps: + - name: 'Done' + shell: bash + run: echo "::info ::Done" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a6971de34d..f3f16e9f97 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,67 +2,219 @@ name: CI on: workflow_dispatch: + inputs: + enableSecurityScan: + type: boolean + default: true + description: 'Enable security scan with Trivy' push: + branches: + - '2.11.x' paths-ignore: - '.github/**' +env: + MAVEN_THREADS: '-T 1' + TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db,aquasec/trivy-db,ghcr.io/aquasecurity/trivy-db + TRIVY_JAVA_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-java-db,aquasec/trivy-java-db,ghcr.io/aquasecurity/trivy-java-db + jobs: build: if: github.repository_owner == 'spring-cloud' runs-on: ubuntu-latest + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true steps: - - uses: actions/checkout@v2 - # cache maven repo - - uses: actions/cache@v2 + - uses: 
actions/checkout@v4 +# cache maven repo + - uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-m2- - # jdk8 - - uses: actions/setup-java@v1 +# jdk8 + - uses: actions/setup-java@v3 with: - java-version: 1.8 - # maven version + java-version: '8' + distribution: 'liberica' - uses: jvalkeal/setup-maven@v1 with: - maven-version: 3.6.2 - # jfrog cli - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' +# jfrog cli + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - # setup frog cli + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} +# setup frog cli - name: Configure JFrog Cli run: | jfrog rt mvnc \ - --server-id-resolve=repo.spring.io \ - --server-id-deploy=repo.spring.io \ - --repo-resolve-releases=libs-release \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ --repo-resolve-snapshots=libs-snapshot \ - --repo-deploy-releases=release \ - --repo-deploy-snapshots=snapshot - echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main >> $GITHUB_ENV + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x >> $GITHUB_ENV echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV echo spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV - # build and publish + - uses: ./.github/actions/install-xmlutils +# build and publish - name: Build and Publish + shell: bash + timeout-minutes: 75 run: | - jfrog rt mvn clean install \ - -Pfull \ - -U -B + mvn clean + ./spring-cloud-dataflow-package/set-package-version.sh + jfrog rt mvn install -Pfull,docs -B + jfrog rt mvn install -pl spring-cloud-dataflow-package -B jfrog rt build-publish - - # build and publish images via composite action - - name: Build and Publish Images - uses: ./.github/actions/build-images + export JFROG_CLI_BUILD_NAME="${JFROG_CLI_BUILD_NAME/spring-cloud-dataflow/spring-cloud-skipper}" + PROJECT_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) +# set +e +# echo "::info ::Project version=$PROJECT_VERSION" +# SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-snapshot-local) +# if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then +# echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# else +# echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# fi + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} with: - version: ${{ env.spring_cloud_dataflow_version }} - dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} - dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }} - - # clean m2 cache + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Capture Test Results + if: ${{ always() }} + uses: 
actions/upload-artifact@v3 + with: + name: test-results + path: '**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore +# clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + outputs: + version: ${{ env.spring_cloud_dataflow_version }} + database-tests: + if: github.repository_owner == 'spring-cloud' + runs-on: ubuntu-latest + strategy: + matrix: + db: [ 'ORACLE', 'DB2' ] + steps: + - uses: actions/checkout@v4 + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2-${{ matrix.db }} + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + - uses: ./.github/actions/install-xmlutils + - name: Test + shell: bash + timeout-minutes: 75 + run: | + jfrog rt mvn clean install -s .settings.xml -DskipTests -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server + export ENABLE_${{ matrix.db }}=true + jfrog rt mvn test -s .settings.xml -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -Dgroups=${{ matrix.db }} + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Capture Test Results + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: test-results + path: '**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + # clean m2 cache - name: Clean cache run: | find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + + images: + name: Build and Publish Images + needs: + - build + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.build.outputs.version }} + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + GCR_JSON_KEY: ${{ secrets.GCR_JSON_KEY }} + scan: + runs-on: ubuntu-latest + if: ${{ inputs.enableSecurityScan == null || inputs.enableSecurityScan }} + steps: + - uses: actions/checkout@v4 + - name: Run Trivy vulnerability scanner in repo mode + uses: aquasecurity/trivy-action@master + with: + scan-type: 'fs' + ignore-unfixed: true + format: 'sarif' + output: 'trivy-results.sarif' + severity: 'CRITICAL,HIGH' + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v2 + with: + sarif_file: 'trivy-results.sarif' + - name: 'Scanned' + shell: bash + run: echo "::info ::Scanned" + done: + runs-on: ubuntu-latest + needs: [ scan, build, images, database-tests ] + steps: + - name: 'Done' + shell: bash + run: echo "::info ::Done" diff --git a/.github/workflows/common-carvel.yml 
b/.github/workflows/common-carvel.yml new file mode 100644 index 0000000000..b514657aed --- /dev/null +++ b/.github/workflows/common-carvel.yml @@ -0,0 +1,115 @@ +name: common-carvel + +on: + workflow_call: + inputs: + package-name: + type: string + description: 'Package Name' + required: true + package-version: + type: string + description: 'Package Version' + required: true + package-bundle: + type: string + description: 'Package Bundle name' + required: true + repository-bundle: + type: string + description: 'Repository Bundle name' + required: true + dataflow-version: + type: string + description: 'Spring Cloud Data Flow Container Version' + required: true + server-version: + type: string + description: 'Server Version' + required: true + skipper-version: + type: string + description: 'Spring Cloud Skipper Container Version' + required: true + ctr-version: + type: string + description: 'Composed Task Runner Container Version' + required: true + server-repository: + type: string + description: 'Docker repo for Data Flow Server' + required: true + skipper-repository: + type: string + description: 'Docker repo for Skipper Server' + required: true + secrets: + DOCKERHUB_USERNAME: + DOCKERHUB_TOKEN: + +jobs: + # test templating before publishing a package + publish: + name: Publish + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Ensure scripts are executable + shell: bash + run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: carvel-dev/setup-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + # Builds a package bundle + - name: Build Package Bundle + id: build-package-bundle + uses: ./.github/actions/build-package-bundle + with: + dataflow-version: ${{ inputs.dataflow-version }} + skipper-version: ${{ inputs.skipper-version }} + skipper-repository: ${{ inputs.skipper-repository }} + server-version: ${{ inputs.server-version }} + server-repository: ${{ inputs.server-repository }} + ctr-version: ${{ inputs.ctr-version }} + package-name: ${{ inputs.package-name }} + package-bundle-template: 'src/carvel/templates/bundle/package' + imgpkg-lock-template: 'src/carvel/templates/imgpkg' + config: 'src/carvel/config' + project-directory: '.' + + # Publishes scdf package bundle as it needs to be in place + # before repository bundle can be created. + - name: Publish Package Bundle + id: publish-package-bundle + uses: ./.github/actions/publish-bundle + with: + path: '${{ steps.build-package-bundle.outputs.bundle-path }}' + repository: ${{ inputs.package-bundle }} + version: ${{ inputs.package-version }} + project-directory: '.' + + # Builds a repository bundle + - name: Build Repository Bundle + id: build-repository-bundle + uses: ./.github/actions/build-repository-bundle + with: + version: ${{ inputs.package-version }} + repo-bundle-template: 'src/carvel/templates/bundle/repo' + package-bundle-repository: ${{ inputs.package-bundle }} + package-name: ${{ inputs.package-name }} + project-directory: '.' + + # Publishes scdf repo bundle + - name: Publish Repository Bundle + id: publish-repository-bundle + uses: ./.github/actions/publish-bundle + with: + path: '${{ steps.build-repository-bundle.outputs.bundle-path }}' + repository: ${{ inputs.repository-bundle }} + version: ${{ inputs.package-version }} + project-directory: '.' 
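
A note on the two publish steps above: the publish-bundle action is handed a directory, a Docker repository, and a version; assuming it wraps "imgpkg push" (the Carvel CLIs are installed by carvel-dev/setup-action earlier in the job), the flow reduces to the sketch below. PACKAGE_VERSION, PKG_BUNDLE_PATH and REPO_BUNDLE_PATH are hypothetical stand-ins for the workflow input and the bundle-path outputs of the two build steps, not names taken from the actions themselves.

    #!/usr/bin/env bash
    # Sketch only: replays Publish Package Bundle / Publish Repository Bundle.
    # PACKAGE_VERSION, PKG_BUNDLE_PATH and REPO_BUNDLE_PATH are assumed stand-ins.
    PACKAGE_VERSION="2.11.0"
    PKG_BUNDLE_PATH="./bundle-package"
    REPO_BUNDLE_PATH="./bundle-repo"
    # The package bundle must be pushed first: the repository bundle built in the
    # next step references the package image, so it has to exist in the registry.
    imgpkg push -b "springcloud/scdf-oss-package:${PACKAGE_VERSION}" -f "${PKG_BUNDLE_PATH}"
    imgpkg push -b "springcloud/scdf-oss-repo:${PACKAGE_VERSION}" -f "${REPO_BUNDLE_PATH}"
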
diff --git a/.github/workflows/docker-rm-tag.sh b/.github/workflows/docker-rm-tag.sh new file mode 100755 index 0000000000..0e3635c156 --- /dev/null +++ b/.github/workflows/docker-rm-tag.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +# springcloud/spring-cloud-skipper-server +IMAGE=$1 +# 2.11.0 +# 2.11.0-jdk8 +# 2.11.0-jdk11 +# 2.11.0-jdk17 +TAG=$2 + +login_data() { +cat < '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
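+# (Hypothetical usage note, not part of the generated wrapper:) each of those
+# variables may hold several flags, quoted in the same style as DEFAULT_JVM_OPTS
+# above; the eval/xargs post-processing further below re-expands the quoted
+# fragments into separate java arguments, e.g.
+#   GRADLE_OPTS='"-Xmx512m" "-Dorg.gradle.daemon=false"' ./gradlew build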
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/.github/workflows/download-jar/gradlew.bat b/.github/workflows/download-jar/gradlew.bat new file mode 100644 index 0000000000..93e3f59f13 --- /dev/null +++ b/.github/workflows/download-jar/gradlew.bat @@ -0,0 +1,92 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/.github/workflows/download-jar/settings.gradle b/.github/workflows/download-jar/settings.gradle new file mode 100644 index 0000000000..b1256fe4e6 --- /dev/null +++ b/.github/workflows/download-jar/settings.gradle @@ -0,0 +1,10 @@ +/* + * This file was generated by the Gradle 'init' task. + * + * The settings file is used to specify which projects to include in your build. + * + * Detailed information about configuring a multi-project build in Gradle can be found + * in the user manual at https://docs.gradle.org/7.6.2/userguide/multi_project_builds.html + */ + +rootProject.name = 'download-maven' diff --git a/.github/workflows/export-app-versions.sh b/.github/workflows/export-app-versions.sh new file mode 100755 index 0000000000..8f50d73d39 --- /dev/null +++ b/.github/workflows/export-app-versions.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +set +e +DATAFLOW_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) +RC=$? 
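+# With -q -DforceStdout, help:evaluate prints only the resolved property value,
+# so a non-zero status means the expression could not be resolved and
+# DATAFLOW_VERSION holds Maven's error output rather than a version; the check
+# below bails out in that case.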
+if ((RC!=0)); then + echo "DATAFLOW_VERSION=$DATAFLOW_VERSION" + exit $RC +fi +echo "DATAFLOW_VERSION=$DATAFLOW_VERSION" +SKIPPER_VERSION=$(mvn help:evaluate -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout) +if [[ "$SKIPPER_VERSION" = *"ERROR"* ]]; then + SKIPPER_VERSION=$(mvn help:evaluate -Dexpression=spring-cloud-skipper.version -q -DforceStdout) +fi +RC=$? +if ((RC!=0)); then + echo "SKIPPER_VERSION=$SKIPPER_VERSION" + exit $RC +fi +echo "SKIPPER_VERSION=$SKIPPER_VERSION" +export DATAFLOW_VERSION +export SKIPPER_VERSION diff --git a/.github/workflows/fix-deployment-files.yml b/.github/workflows/fix-deployment-files.yml index 65a6fad09a..429efb209a 100644 --- a/.github/workflows/fix-deployment-files.yml +++ b/.github/workflows/fix-deployment-files.yml @@ -3,7 +3,7 @@ name: Fix Deployment Files on: push: branches: - - main + - 2.11.x jobs: build: @@ -14,12 +14,16 @@ jobs: os: [ubuntu-latest] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' - name: Build with Maven run: mvn process-resources -P deploymentfiles @@ -40,4 +44,3 @@ jobs: assignees: ilayaperumalg reviewers: jvalkeal,ilayaperumalg branch: github-actions/update-deployment-files - diff --git a/.github/workflows/generate-release-notes.sh b/.github/workflows/generate-release-notes.sh new file mode 100755 index 0000000000..a50aea60f0 --- /dev/null +++ b/.github/workflows/generate-release-notes.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +RUNNER_TMP="$1" +DEPLOYER_VERSION="$2" +DATAFLOW_UI_VERSION="$3" +DATAFLOW_VERSION="$4" +if [ "$4" == "" ]; then + echo "Usage: <runner-tmp> <deployer-version> <dataflow-ui-version> <dataflow-version>" + exit 1 +fi +RELEASE_NOTES_FILE="$RUNNER_TMP/release_notes.md" +RELEASE_NOTES_DATA="$RUNNER_TMP/release_notes_data.json" +RELEASE_NOTES_HEADERS1="$RUNNER_TMP/release_notes_headers1.json" +RELEASE_NOTES_HEADERS2="$RUNNER_TMP/release_notes_headers2.json" +RELEASE_NOTES_HEADERS3="$RUNNER_TMP/release_notes_headers3.json" +RELEASE_NOTES_FOOTERS1="$RUNNER_TMP/release_notes_footers1.json" +RELEASE_NOTES_FOOTERS2="$RUNNER_TMP/release_notes_footers2.json" +RELEASE_NOTES_FOOTERS3="$RUNNER_TMP/release_notes_footers3.json" +RELEASE_NOTES_ISSUES1="$RUNNER_TMP/release_notes_issues1.json" +RELEASE_NOTES_ISSUES2="$RUNNER_TMP/release_notes_issues2.json" +RELEASE_NOTES_ISSUES3="$RUNNER_TMP/release_notes_issues3.json" +RELEASE_NOTES_PROJECT1="$RUNNER_TMP/release_notes_project1.json" +RELEASE_NOTES_PROJECT2="$RUNNER_TMP/release_notes_project2.json" +RELEASE_NOTES_PROJECT3="$RUNNER_TMP/release_notes_project3.json" +echo "Retrieving headers" +gh issue list --repo spring-cloud/spring-cloud-deployer \ + --search milestone:$DEPLOYER_VERSION \ + --label automation/rlnotes-header \ + --state all --json title,body \ + --jq '{headers:map(.),headerslength:(length)}' \ + > $RELEASE_NOTES_HEADERS1 +gh issue list --repo spring-cloud/spring-cloud-dataflow-ui \ + --search milestone:$DATAFLOW_UI_VERSION \ + --label automation/rlnotes-header \ + --state all --json title,body \ + --jq '{headers:map(.),headerslength:(length)}' \ + > $RELEASE_NOTES_HEADERS2 +gh issue list --repo spring-cloud/spring-cloud-dataflow \ + --search milestone:$DATAFLOW_VERSION \ + --label automation/rlnotes-header \ + --state all --json title,body \ + --jq '{headers:map(.),headerslength:(length)}' 
\ + > $RELEASE_NOTES_HEADERS3 +echo "Retrieving footers" +gh issue list --repo spring-cloud/spring-cloud-deployer \ + --search milestone:$DEPLOYER_VERSION \ + --label automation/rlnotes-footer \ + --state all --json title,body \ + --jq '{footers:map(.),footerslength:(length)}' \ + > $RELEASE_NOTES_FOOTERS1 +gh issue list --repo spring-cloud/spring-cloud-dataflow-ui \ + --search milestone:$DATAFLOW_UI_VERSION \ + --label automation/rlnotes-footer \ + --state all --json title,body \ + --jq '{footers:map(.),footerslength:(length)}' \ + > $RELEASE_NOTES_FOOTERS2 +gh issue list --repo spring-cloud/spring-cloud-dataflow \ + --search milestone:$DATAFLOW_VERSION \ + --label automation/rlnotes-footer \ + --state all --json title,body \ + --jq '{footers:map(.),footerslength:(length)}' \ + > $RELEASE_NOTES_FOOTERS3 +echo "Creating project data" +echo "{\"name\":\"Spring Cloud Dataflow Deployer\",\"version\":\"$DEPLOYER_VERSION\"}" > $RELEASE_NOTES_PROJECT1 +echo "{\"name\":\"Spring Cloud Dataflow UI\",\"version\":\"$DATAFLOW_UI_VERSION\"}" > $RELEASE_NOTES_PROJECT2 +echo "{\"name\":\"Spring Cloud Dataflow\",\"version\":\"$DATAFLOW_VERSION\"}" > $RELEASE_NOTES_PROJECT3 + +echo "Retrieving issues" +gh issue list --repo spring-cloud/spring-cloud-deployer \ + --search milestone:$DEPLOYER_VERSION \ + --state all --json number,title,labels \ + --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-deployer"})}' \ + > $RELEASE_NOTES_ISSUES1 +gh issue list --repo spring-cloud/spring-cloud-dataflow-ui \ + --search milestone:$DATAFLOW_UI_VERSION \ + --state all --json number,title,labels \ + --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow-ui"})}' \ + > $RELEASE_NOTES_ISSUES2 +gh issue list --repo spring-cloud/spring-cloud-dataflow \ + --search milestone:$DATAFLOW_VERSION \ + --state all --limit 100 --json number,title,labels \ + --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow"})}' \ + > $RELEASE_NOTES_ISSUES3 +echo "Creating release notes data" +jq -s '{issues:(.[0].issues + .[1].issues + .[2].issues),headers:(.[3].headers + .[4].headers + .[5].headers),headerslength:(.[3].headerslength + .[4].headerslength + .[5].headerslength),footers:(.[6].footers + .[7].footers + .[8].footers), footerslength:(.[6].footerslength + .[7].footerslength + .[8].footerslength),projects:{spring_cloud_deployer:{name:"Spring Cloud Deployer",version:(.[9].version)},spring_cloud_skipper:{name:"Spring Cloud Skipper",version:(.[11].version)},spring_cloud_dataflow_ui:{name:"Spring Cloud Dataflow UI",version:(.[10].version)},spring_cloud_dataflow:{name:"Spring Cloud Dataflow",version:(.[11].version)}}}' \ + $RELEASE_NOTES_ISSUES1 $RELEASE_NOTES_ISSUES2 $RELEASE_NOTES_ISSUES3 \ + $RELEASE_NOTES_HEADERS1 $RELEASE_NOTES_HEADERS2 $RELEASE_NOTES_HEADERS3 \ + $RELEASE_NOTES_FOOTERS1 $RELEASE_NOTES_FOOTERS2 $RELEASE_NOTES_FOOTERS3 \ + $RELEASE_NOTES_PROJECT1 $RELEASE_NOTES_PROJECT2 $RELEASE_NOTES_PROJECT3 \ +> $RELEASE_NOTES_DATA +echo "Applying mustache templates" +mustache $RELEASE_NOTES_DATA .github/rlnotes.mustache > $RELEASE_NOTES_FILE diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml index d11ed6e024..efd606e2e3 100644 --- a/.github/workflows/github-release.yml +++ 
b/.github/workflows/github-release.yml @@ -11,316 +11,60 @@ jobs: ghrelease: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - # zoo extract and ensure - - name: Extract Zoo Context Properties - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-extract-context-properties: true - ensure-env: | - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version - BUILD_ZOO_HANDLER_spring_cloud_skipper_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_version - # tooling - - name: Install Tooling - run: | - curl -sSL https://github.com/cbroglie/mustache/releases/download/v1.2.2/mustache_1.2.2_linux_amd64.tar.gz | sudo tar -C /usr/local/bin/ --no-same-owner -xzv mustache - # release notes and gh release - - name: Release - env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - RELEASE_NOTES_FILE: ${{runner.temp}}/release_notes.md5 - RELEASE_NOTES_DATA: ${{runner.temp}}/release_notes_data.json - RELEASE_NOTES_HEADERS1: ${{runner.temp}}/release_notes_headers1.json - RELEASE_NOTES_HEADERS2: ${{runner.temp}}/release_notes_headers2.json - RELEASE_NOTES_HEADERS3: ${{runner.temp}}/release_notes_headers3.json - RELEASE_NOTES_HEADERS4: ${{runner.temp}}/release_notes_headers4.json - RELEASE_NOTES_HEADERS5: ${{runner.temp}}/release_notes_headers5.json - RELEASE_NOTES_HEADERS6: ${{runner.temp}}/release_notes_headers6.json - RELEASE_NOTES_HEADERS7: ${{runner.temp}}/release_notes_headers7.json - RELEASE_NOTES_HEADERS8: ${{runner.temp}}/release_notes_headers8.json - RELEASE_NOTES_HEADERS9: ${{runner.temp}}/release_notes_headers9.json - RELEASE_NOTES_HEADERS10: ${{runner.temp}}/release_notes_headers10.json - RELEASE_NOTES_FOOTERS1: ${{runner.temp}}/release_notes_footers1.json - RELEASE_NOTES_FOOTERS2: ${{runner.temp}}/release_notes_footers2.json - RELEASE_NOTES_FOOTERS3: ${{runner.temp}}/release_notes_footers3.json - RELEASE_NOTES_FOOTERS4: ${{runner.temp}}/release_notes_footers4.json - RELEASE_NOTES_FOOTERS5: ${{runner.temp}}/release_notes_footers5.json - RELEASE_NOTES_FOOTERS6: ${{runner.temp}}/release_notes_footers6.json - RELEASE_NOTES_FOOTERS7: ${{runner.temp}}/release_notes_footers7.json - RELEASE_NOTES_FOOTERS8: ${{runner.temp}}/release_notes_footers8.json - RELEASE_NOTES_FOOTERS9: ${{runner.temp}}/release_notes_footers9.json - RELEASE_NOTES_FOOTERS10: ${{runner.temp}}/release_notes_footers10.json - RELEASE_NOTES_ISSUES1: ${{runner.temp}}/release_notes_issues1.json - RELEASE_NOTES_ISSUES2: ${{runner.temp}}/release_notes_issues2.json - RELEASE_NOTES_ISSUES3: ${{runner.temp}}/release_notes_issues3.json - RELEASE_NOTES_ISSUES4: ${{runner.temp}}/release_notes_issues4.json - RELEASE_NOTES_ISSUES5: ${{runner.temp}}/release_notes_issues5.json - RELEASE_NOTES_ISSUES6: ${{runner.temp}}/release_notes_issues6.json - RELEASE_NOTES_ISSUES7: ${{runner.temp}}/release_notes_issues7.json - RELEASE_NOTES_ISSUES8: ${{runner.temp}}/release_notes_issues8.json - RELEASE_NOTES_ISSUES9: ${{runner.temp}}/release_notes_issues9.json - RELEASE_NOTES_ISSUES10: ${{runner.temp}}/release_notes_issues10.json - RELEASE_NOTES_PROJECT1: ${{runner.temp}}/release_notes_project1.json - RELEASE_NOTES_PROJECT2: 
${{runner.temp}}/release_notes_project2.json - RELEASE_NOTES_PROJECT3: ${{runner.temp}}/release_notes_project3.json - RELEASE_NOTES_PROJECT4: ${{runner.temp}}/release_notes_project4.json - RELEASE_NOTES_PROJECT5: ${{runner.temp}}/release_notes_project5.json - RELEASE_NOTES_PROJECT6: ${{runner.temp}}/release_notes_project6.json - RELEASE_NOTES_PROJECT7: ${{runner.temp}}/release_notes_project7.json - RELEASE_NOTES_PROJECT8: ${{runner.temp}}/release_notes_project8.json - RELEASE_NOTES_PROJECT9: ${{runner.temp}}/release_notes_project9.json - RELEASE_NOTES_PROJECT10: ${{runner.temp}}/release_notes_project10.json - run: | - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-build \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS1 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-common \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS2 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS3 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-local \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS4 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-cloudfoundry \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS5 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-kubernetes \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS6 - gh issue list \ - --repo spring-cloud/spring-cloud-common-security-config \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS7 - gh issue list \ - --repo spring-cloud/spring-cloud-skipper \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_skipper_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS8 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-ui \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq '{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS9 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version \ - --label automation/rlnotes-header \ - --state all --json title,body \ - --jq 
'{headers:map(.),headerslength:(length)}' \ - > $RELEASE_NOTES_HEADERS10 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-build \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS1 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-common \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS2 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS3 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-local \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS4 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-cloudfoundry \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS5 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-kubernetes \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS6 - gh issue list \ - --repo spring-cloud/spring-cloud-common-security-config \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS7 - gh issue list \ - --repo spring-cloud/spring-cloud-skipper \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_skipper_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS8 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-ui \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS9 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version \ - --label automation/rlnotes-footer \ - --state all --json title,body \ - --jq '{footers:map(.),footerslength:(length)}' \ - > $RELEASE_NOTES_FOOTERS10 - echo "{\"name\":\"Spring Cloud Dataflow Build\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version\"}" > $RELEASE_NOTES_PROJECT1 - echo "{\"name\":\"Spring Cloud Dataflow Common\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version\"}" > $RELEASE_NOTES_PROJECT2 - echo "{\"name\":\"Spring Cloud Dataflow Deployer\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_deployer_version\"}" > $RELEASE_NOTES_PROJECT3 - echo "{\"name\":\"Spring Cloud Dataflow 
Deployer Local\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version\"}" > $RELEASE_NOTES_PROJECT4 - echo "{\"name\":\"Spring Cloud Dataflow Deployer CF\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version\"}" > $RELEASE_NOTES_PROJECT5 - echo "{\"name\":\"Spring Cloud Dataflow Deployer K8S\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version\"}" > $RELEASE_NOTES_PROJECT6 - echo "{\"name\":\"Spring Cloud Common Security Config\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version\"}" > $RELEASE_NOTES_PROJECT7 - echo "{\"name\":\"Spring Cloud Skipper\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_skipper_version\"}" > $RELEASE_NOTES_PROJECT8 - echo "{\"name\":\"Spring Cloud Dataflow UI\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version\"}" > $RELEASE_NOTES_PROJECT9 - echo "{\"name\":\"Spring Cloud Dataflow\",\"version\":\"$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version\"}" > $RELEASE_NOTES_PROJECT10 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-build \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow-build"})}' \ - > $RELEASE_NOTES_ISSUES1 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-common \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow-common"})}' \ - > $RELEASE_NOTES_ISSUES2 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-deployer"})}' \ - > $RELEASE_NOTES_ISSUES3 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-local \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-deployer-local"})}' \ - > $RELEASE_NOTES_ISSUES4 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-cloudfoundry \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-deployer-cloudfoundry"})}' \ - > $RELEASE_NOTES_ISSUES5 - gh issue list \ - --repo spring-cloud/spring-cloud-deployer-kubernetes \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-deployer-kubernetes"})}' \ - > $RELEASE_NOTES_ISSUES6 - gh issue list \ - --repo spring-cloud/spring-cloud-common-security-config \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version \ - --state 
all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-common-security-config"})}' \ - > $RELEASE_NOTES_ISSUES7 - gh issue list \ - --repo spring-cloud/spring-cloud-skipper \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_skipper_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-skipper"})}' \ - > $RELEASE_NOTES_ISSUES8 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow-ui \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow-ui"})}' \ - > $RELEASE_NOTES_ISSUES9 - gh issue list \ - --repo spring-cloud/spring-cloud-dataflow \ - --search milestone:$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version \ - --state all --json number,title,labels \ - --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow"})}' \ - > $RELEASE_NOTES_ISSUES10 - jq -s '{issues:(.[0].issues + .[1].issues + .[2].issues + .[3].issues + .[4].issues + .[5].issues + .[6].issues + .[7].issues + .[8].issues + .[9].issues),headers:(.[10].headers + .[11].headers + .[12].headers + .[13].headers + .[14].headers + .[15].headers + .[16].headers + .[17].headers + .[18].headers + .[19].headers),headerslength:(.[10].headerslength + .[11].headerslength + .[12].headerslength + .[13].headerslength + .[14].headerslength + .[15].headerslength + .[16].headerslength + .[17].headerslength + .[18].headerslength + .[19].headerslength),footers:(.[20].footers + .[21].footers + .[22].footers + .[23].footers + .[24].footers + .[25].footers + .[26].footers + .[27].footers + .[28].footers + .[29].footers), footerslength:(.[20].footerslength + .[21].footerslength + .[22].footerslength + .[23].footerslength + .[24].footerslength + .[25].footerslength + .[26].footerslength + .[27].footerslength + .[28].footerslength + .[29].footerslength),projects:{spring_cloud_dataflow_build:{name:"Spring Cloud Dataflow Build",version:(.[30].version)},spring_cloud_dataflow_common:{name:"Spring Cloud Dataflow Common",version:(.[31].version)},spring_cloud_deployer:{name:"Spring Cloud Deployer",version:(.[32].version)},spring_cloud_deployer_local:{name:"Spring Cloud Deployer Local",version:(.[33].version)},spring_cloud_deployer_cloudfoundry:{name:"Spring Cloud Deployer CF",version:(.[34].version)},spring_cloud_deployer_kubernetes:{name:"Spring Cloud Deployer K8S",version:(.[35].version)},spring_cloud_common_security_config:{name:"Spring Cloud Common Security Config",version:(.[36].version)},spring_cloud_skipper:{name:"Spring Cloud Skipper",version:(.[37].version)},spring_cloud_dataflow_ui:{name:"Spring Cloud Dataflow UI",version:(.[38].version)},spring_cloud_dataflow:{name:"Spring Cloud Dataflow",version:(.[39].version)}}}' \ - $RELEASE_NOTES_ISSUES1 $RELEASE_NOTES_ISSUES2 $RELEASE_NOTES_ISSUES3 $RELEASE_NOTES_ISSUES4 $RELEASE_NOTES_ISSUES5 \ - $RELEASE_NOTES_ISSUES6 $RELEASE_NOTES_ISSUES7 $RELEASE_NOTES_ISSUES8 $RELEASE_NOTES_ISSUES9 $RELEASE_NOTES_ISSUES10 \ - $RELEASE_NOTES_HEADERS1 $RELEASE_NOTES_HEADERS2 $RELEASE_NOTES_HEADERS3 $RELEASE_NOTES_HEADERS4 
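The `jq -s` merge above is dense but mechanical: `-s` (slurp) reads all input files into one array, so each fragment is addressed positionally (`.[0]`, `.[1]`, ...) and like-named arrays are concatenated with `+`. A reduced sketch of the same technique with three inputs:

```bash
echo '{"issues":[{"number":1,"title":"first"}]}'  > i1.json
echo '{"issues":[{"number":2,"title":"second"}]}' > i2.json
echo '{"name":"Demo Project","version":"1.0.0"}'  > p1.json
# .[0] and .[1] are issue fragments, .[2] is a project descriptor.
jq -s '{issues: (.[0].issues + .[1].issues),
        projects: {demo: {name: .[2].name, version: .[2].version}}}' \
  i1.json i2.json p1.json > release-notes-data.json
```

The ordering of the file arguments is therefore significant: the positional indexes in the filter must line up exactly with the argument list.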
$RELEASE_NOTES_HEADERS5 \ - $RELEASE_NOTES_HEADERS6 $RELEASE_NOTES_HEADERS7 $RELEASE_NOTES_HEADERS8 $RELEASE_NOTES_HEADERS9 $RELEASE_NOTES_HEADERS10 \ - $RELEASE_NOTES_FOOTERS1 $RELEASE_NOTES_FOOTERS2 $RELEASE_NOTES_FOOTERS3 $RELEASE_NOTES_FOOTERS4 $RELEASE_NOTES_FOOTERS5 \ - $RELEASE_NOTES_FOOTERS6 $RELEASE_NOTES_FOOTERS7 $RELEASE_NOTES_FOOTERS8 $RELEASE_NOTES_FOOTERS9 $RELEASE_NOTES_FOOTERS10 \ - $RELEASE_NOTES_PROJECT1 $RELEASE_NOTES_PROJECT2 $RELEASE_NOTES_PROJECT3 $RELEASE_NOTES_PROJECT4 $RELEASE_NOTES_PROJECT5 \ - $RELEASE_NOTES_PROJECT6 $RELEASE_NOTES_PROJECT7 $RELEASE_NOTES_PROJECT8 $RELEASE_NOTES_PROJECT9 $RELEASE_NOTES_PROJECT10 \ - > $RELEASE_NOTES_DATA - mustache $RELEASE_NOTES_DATA .github/rlnotes.mustache > $RELEASE_NOTES_FILE - gh release create v$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version \ - --draft \ - --title "Spring Cloud Data Flow $BUILD_ZOO_HANDLER_spring_cloud_dataflow_version" \ - --notes-file $RELEASE_NOTES_FILE - # zoo success - - name: Notify Build Success Zoo Handler Controller - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "ghrelease-succeed" - } + - uses: actions/checkout@v4 + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version + # tooling + - name: Install Tooling + run: | + curl -sSL https://github.com/cbroglie/mustache/releases/download/v1.4.0/mustache_1.4.0_linux_amd64.tar.gz | sudo tar -C /usr/local/bin/ --no-same-owner -xzv mustache + # release notes and gh release + - name: Release + shell: bash + env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + run: | + # generate release notes + .github/workflows/generate-release-notes.sh "${{ runner.temp }}" "$BUILD_ZOO_HANDLER_spring_cloud_deployer_version" "$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version" "$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version" + gh release create v$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version \ + --draft \ + --title "Spring Cloud Data Flow $BUILD_ZOO_HANDLER_spring_cloud_dataflow_version" \ + --notes-file "${{runner.temp}}/release_notes.md" + # retrieve spring-cloud-dataflow-package and upload to github release + export PACKAGE_VERSION=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} + ./mvnw -s .settings.xml build-helper:parse-version versions:set -DskipResolution=true -DprocessAllModules=true -DgenerateBackupPoms=false -Dartifactory.publish.artifacts=false -DnewVersion=$PACKAGE_VERSION + ./spring-cloud-dataflow-package/set-package-version.sh + ./mvnw -s .settings.xml package -pl spring-cloud-dataflow-package + PACKAGE_FILE="./spring-cloud-dataflow-package/target/spring-cloud-dataflow-oss-install-${PACKAGE_VERSION}.zip" + gh release upload v$PACKAGE_VERSION $PACKAGE_FILE --clobber + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "ghrelease-succeed" + } - # zoo failure - - name: Notify Build Failure Zoo Handler Controller - if: ${{ failure() }} - uses: 
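The tail of the Release step above then renders the merged data through the mustache template and publishes the result. As a standalone sketch with a placeholder version: the `mustache` binary is the cbroglie/mustache CLI installed by the tooling step, which takes the data file first and the template second.

```bash
VERSION="2.11.0"   # placeholder for BUILD_ZOO_HANDLER_spring_cloud_dataflow_version
mustache release-notes-data.json .github/rlnotes.mustache > release_notes.md
gh release create "v$VERSION" \
  --draft \
  --title "Spring Cloud Data Flow $VERSION" \
  --notes-file release_notes.md
# --clobber lets a re-run overwrite an asset with the same name.
gh release upload "v$VERSION" \
  "./spring-cloud-dataflow-package/target/spring-cloud-dataflow-oss-install-$VERSION.zip" \
  --clobber
```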
jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.REPO_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "ghrelease-failed" - } + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.REPO_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "ghrelease-failed" + } diff --git a/.github/workflows/images.json b/.github/workflows/images.json new file mode 100644 index 0000000000..83652cd335 --- /dev/null +++ b/.github/workflows/images.json @@ -0,0 +1,34 @@ +{ + "include": [ + { + "name": "spring-cloud-dataflow-server", + "path": "spring-cloud-dataflow-server/target", + "image": "springcloud/spring-cloud-dataflow-server" + }, + { + "name": "spring-cloud-skipper-server", + "path": "spring-cloud-skipper/spring-cloud-skipper-server/target", + "image": "springcloud/spring-cloud-skipper-server" + }, + { + "name": "spring-cloud-dataflow-composed-task-runner", + "path": "spring-cloud-dataflow-composed-task-runner/target", + "image": "springcloud/spring-cloud-dataflow-composed-task-runner" + }, + { + "name": "spring-cloud-dataflow-single-step-batch-job", + "path": "spring-cloud-dataflow-single-step-batch-job/target", + "image": "springcloud/spring-cloud-dataflow-single-step-batch-job" + }, + { + "name": "spring-cloud-dataflow-tasklauncher-sink-kafka", + "path": "spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-sink-kafka/target", + "image": "springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka" + }, + { + "name": "spring-cloud-dataflow-tasklauncher-sink-rabbit", + "path": "spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-sink-rabbit/target", + "image": "springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit" + } + ] +} diff --git a/.github/workflows/issue-handler.yml b/.github/workflows/issue-handler.yml index 4957d63db3..b704d3de1e 100644 --- a/.github/workflows/issue-handler.yml +++ b/.github/workflows/issue-handler.yml @@ -29,7 +29,10 @@ jobs: "cppwfs", "mminella", "dturanski", - "sobychacko" + "onobc", + "claudiahub", + "sobychacko", + "corneil" ] }, "recipes": [ diff --git a/.github/workflows/label-manage.yml b/.github/workflows/label-manage.yml deleted file mode 100644 index d83543e6e3..0000000000 --- a/.github/workflows/label-manage.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Labels Manage - -on: - push: - branches: - - 'main' - paths: - - '.github/labels-manage.yml' - - '.github/workflows/label-manage.yml' - workflow_dispatch: - -jobs: - labeler: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Mangle Labels - uses: crazy-max/ghaction-github-labeler@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - yaml-file: .github/labels-manage.yml - dry-run: false - skip-delete: true diff --git a/.github/workflows/milestone-controller.yml b/.github/workflows/milestone-controller.yml index 7a7a44566a..36a4429857 100644 --- a/.github/workflows/milestone-controller.yml +++ b/.github/workflows/milestone-controller.yml @@ -29,30 +29,10 @@ jobs: { "if": "initial == true", "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-build", - "ref": "main", - "workflow": "milestone-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-build' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - 
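The new `images.json` above turns the image list into data: adding or removing an image becomes a manifest edit rather than a script change. A hedged sketch of how such a manifest can be consumed (illustrative only, not the repository's actual loop), streaming each `include` entry through jq:

```bash
TAG="${TAG:-2.11.0}"   # placeholder version tag
jq -c '.include[]' .github/workflows/images.json | while read -r entry; do
  name=$(jq -r '.name'  <<< "$entry")
  path=$(jq -r '.path'  <<< "$entry")
  image=$(jq -r '.image' <<< "$entry")
  echo "would pack $path/$name-$TAG.jar -> $image:$TAG"
done
```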
"owner": "spring-cloud", - "repo": "spring-cloud-dataflow-common", - "ref": "main", - "workflow": "milestone-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-common' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-deployer", - "ref": "main", + "ref": "2.9.x", "workflow": "milestone-worker.yml" } }, @@ -61,78 +41,38 @@ jobs: "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", - "repo": "spring-cloud-deployer-local", - "ref": "main", - "workflow": "milestone-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-local' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-cloudfoundry", - "ref": "main", - "workflow": "milestone-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-cloudfoundry' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-kubernetes", - "ref": "main", - "workflow": "milestone-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-kubernetes' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-common-security-config", - "ref": "main", - "workflow": "milestone-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-common-security-config' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-skipper", - "ref": "main", + "repo": "spring-cloud-dataflow-ui", + "ref": "3.4.x", "workflow": "milestone-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-skipper' && data.owner == 'spring-cloud'", + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-ui", - "ref": "main", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", "workflow": "milestone-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow", - "ref": "main", - "workflow": "milestone-worker.yml" + "ref": "2.11.x", + "workflow": "carvel-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "if": "data.event == 'carvel-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow", - "ref": "main", + "ref": "2.11.x", "workflow": "promote-milestone.yml" } }, @@ -142,7 +82,7 @@ jobs: "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow", - "ref": "main", + "ref": "2.11.x", "workflow": "github-release.yml" } }, @@ -166,5 +106,12 @@ jobs: "fail": { "message": "something 
went wrong in build train" } + }, + { + "if": "data.event == 'carvel-failed'", + "action": "fail", + "fail": { + "message": "hi, something went wrong with carvel" + } } ] diff --git a/.github/workflows/milestone-worker.yml b/.github/workflows/milestone-worker.yml index a5a4e2c36f..2e8281e038 100644 --- a/.github/workflows/milestone-worker.yml +++ b/.github/workflows/milestone-worker.yml @@ -7,62 +7,58 @@ on: description: 'Build Zoo Handler Payload' required: true +env: + MAVEN_THREADS: '-T 0.5C' + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: '8' + distribution: 'liberica' - uses: jvalkeal/setup-maven@v1 with: - maven-version: 3.6.3 - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - - name: Install pack - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - pack-version: 0.18.0 - - uses: actions/cache@v2 + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-m2- - # target deploy repos +# target deploy repos - name: Configure JFrog Cli run: | jfrog rt mvnc \ - --server-id-deploy=repo.spring.io \ - --repo-deploy-releases=libs-staging-local \ - --repo-deploy-snapshots=libs-staging-local - echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-milestone >> $GITHUB_ENV + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-milestone-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x-milestone >> $GITHUB_ENV echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - # zoo extract and ensure +# zoo extract and ensure - name: Extract Zoo Context Properties uses: jvalkeal/build-zoo-handler@v0.0.4 with: dispatch-handler-extract-context-properties: true ensure-env: | BUILD_ZOO_HANDLER_milestone_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version BUILD_ZOO_HANDLER_spring_cloud_deployer_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version - BUILD_ZOO_HANDLER_spring_cloud_skipper_version BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version - - # build and publish to configured target + - name: 'Install: xmllint' + uses: ./.github/actions/install-xmlutils - name: Build and Publish run: | jfrog rt mvn build-helper:parse-version versions:set \ @@ -73,20 +69,16 @@ jobs: -Dartifactory.publish.artifacts=false \ -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}-'${BUILD_ZOO_HANDLER_milestone_version} \ -B - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV - jfrog rt mvn versions:update-parent \ - -gs .github/settings.xml \ - -Pstagingmilestone \ - 
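The reworked `Configure JFrog Cli` step above is worth isolating: it now pins both the resolution and the deployment repositories, where the old step configured deployment only. In isolation, with a placeholder server id standing in for `vars.JF_SERVER_ID`:

```bash
jfrog rt mvnc \
  --server-id-resolve=spring \
  --server-id-deploy=spring \
  --repo-resolve-releases=libs-milestone \
  --repo-resolve-snapshots=libs-snapshot \
  --repo-deploy-releases=libs-milestone-local \
  --repo-deploy-snapshots=libs-snapshot-local
# The name/number pair keys the published build-info; the promote jobs
# later act on exactly this pair.
echo "JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x-milestone" >> "$GITHUB_ENV"
echo "JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER" >> "$GITHUB_ENV"
```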
-DgenerateBackupPoms=false \ - -DparentVersion='['${BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version}']' \ - -B + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version}" jfrog rt mvn versions:set-property \ -gs .github/settings.xml \ -Pstagingmilestone \ -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-dataflow-common.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version} \ - -B + -Dproperty=dataflow.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} \ + -B jfrog rt mvn versions:set-property \ -gs .github/settings.xml \ -Pstagingmilestone \ @@ -94,41 +86,6 @@ jobs: -Dproperty=spring-cloud-deployer.version \ -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingmilestone \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-local.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingmilestone \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-cloudfoundry.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingmilestone \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-kubernetes.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingmilestone \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-common-security-config.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingmilestone \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-skipper.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_skipper_version} \ - -B jfrog rt mvn versions:set-property \ -gs .github/settings.xml \ -Pstagingmilestone \ @@ -137,15 +94,26 @@ jobs: -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ -B jfrog rt build-clean - jfrog rt mvn clean install \ - -gs .github/settings.xml \ - -P-spring,stagingmilestone,full,deploymentfiles \ - -DskipTests -U -B + ./spring-cloud-dataflow-package/set-package-version.sh + jfrog rt mvn -gs .github/settings.xml -Pstagingmilestone,full,deploymentfiles,docs -B install -DskipTests jfrog rt build-publish - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-milestone >> $GITHUB_ENV + PROJECT_VERSION=$(mvn -gs .github/settings.xml -Pstagingmilestone help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$PROJECT_VERSION >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-2-11-x-milestone >> $GITHUB_ENV echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - # zoo tag +# set +e +# echo "::info ::Project version=$PROJECT_VERSION" +# SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-milestone-local) +# if
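The version-capture idiom above, common to all of these workers, is: evaluate the project version once, persist it for later steps through `$GITHUB_ENV`, and surface it in the run log with the `::notice` workflow command. `help:evaluate -q -DforceStdout` prints only the expression value, which makes it safe to capture:

```bash
VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$VERSION" >> "$GITHUB_ENV"
echo "::notice ::set dataflow.version=${VERSION}"
```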
[[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then +# echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# else +# echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# fi + +# zoo tag - name: Tag Release uses: jvalkeal/build-zoo-handler@v0.0.4 with: @@ -153,15 +121,16 @@ jobs: tag-release-tag: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} tag-release-tag-prefix: v - # build and publish images via composite action +# build and publish images via composite action - name: Build and Publish Images uses: ./.github/actions/build-images with: version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }} + GCR_JSON_KEY: ${{ secrets.GCR_JSON_KEY }} - # zoo success +# zoo success - name: Notify Build Success Zoo Handler Controller uses: jvalkeal/build-zoo-handler@v0.0.4 with: @@ -171,7 +140,7 @@ jobs: "event": "build-succeed" } - # zoo failure +# zoo failure - name: Notify Build Failure Zoo Handler Controller if: ${{ failure() }} uses: jvalkeal/build-zoo-handler@v0.0.4 @@ -182,7 +151,7 @@ jobs: "event": "build-failed", "message": "spring-cloud-dataflow failed" } - # clean m2 cache +# clean m2 cache - name: Clean cache run: | find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr diff --git a/.github/workflows/next-dev-version-controller.yml b/.github/workflows/next-dev-version-controller.yml index fba1759700..b5f78b3a93 100644 --- a/.github/workflows/next-dev-version-controller.yml +++ b/.github/workflows/next-dev-version-controller.yml @@ -24,90 +24,20 @@ jobs: { "if": "initial == true", "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-build", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-dataflow-build' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-common", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-dataflow-common' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-deployer", - "ref": "main", + "ref": "2.9.x", "workflow": "next-dev-version-worker.yml" } }, { "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-deployer' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-local", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-deployer-local' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-cloudfoundry", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-deployer-cloudfoundry' && data.owner == 'spring-cloud'", - "action": 
"workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-kubernetes", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-deployer-kubernetes' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-common-security-config", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-common-security-config' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-skipper", - "ref": "main", - "workflow": "next-dev-version-worker.yml" - } - }, - { - "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-skipper' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow-ui", - "ref": "main", + "ref": "3.4.x", "workflow": "next-dev-version-worker.yml" } }, @@ -117,7 +47,7 @@ jobs: "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow", - "ref": "main", + "ref": "2.11.x", "workflow": "next-dev-version-worker.yml" } }, diff --git a/.github/workflows/next-dev-version-worker.yml b/.github/workflows/next-dev-version-worker.yml index 8a1084b8f7..2b39130b9d 100644 --- a/.github/workflows/next-dev-version-worker.yml +++ b/.github/workflows/next-dev-version-worker.yml @@ -7,30 +7,30 @@ on: description: 'Build Zoo Handler Payload' required: true +env: + MAVEN_THREADS: '' + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: '8' + distribution: 'liberica' - uses: jvalkeal/setup-maven@v1 with: - maven-version: 3.6.3 - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - - name: Install pack - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - pack-version: 0.18.0 + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} # cache maven .m2 - - uses: actions/cache@v2 + - uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -41,10 +41,13 @@ jobs: - name: Configure JFrog Cli run: | jfrog rt mvnc \ - --server-id-deploy=repo.spring.io \ - --repo-deploy-releases=release \ - --repo-deploy-snapshots=snapshot - echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-ndv >> $GITHUB_ENV + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x-ndv >> $GITHUB_ENV echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV # zoo extract and ensure @@ -62,63 +65,37 @@ jobs: -Dartifactory.publish.artifacts=false \ -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.nextIncrementalVersion}-SNAPSHOT' \ -B - echo 
BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV - jfrog rt mvn versions:update-parent \ - -DallowSnapshots=true \ - -DgenerateBackupPoms=false \ - -DparentVersion='['${BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version}']' \ - -B + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version}" jfrog rt mvn versions:set-property \ -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-dataflow-common.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version} \ - -B + -Dproperty=dataflow.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} \ + -B jfrog rt mvn versions:set-property \ -DgenerateBackupPoms=false \ -Dproperty=spring-cloud-deployer.version \ -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ -B - jfrog rt mvn versions:set-property \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-local.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version} \ - -B - jfrog rt mvn versions:set-property \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-cloudfoundry.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version} \ - -B - jfrog rt mvn versions:set-property \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-kubernetes.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version} \ - -B - jfrog rt mvn versions:set-property \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-common-security-config.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version} \ - -B - jfrog rt mvn versions:set-property \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-skipper.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_skipper_version} \ - -B jfrog rt mvn versions:set-property \ -DgenerateBackupPoms=false \ -Dproperty=spring-cloud-dataflow-ui.version \ -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ -B jfrog rt build-clean - jfrog rt mvn clean install -DskipTests -B + mvn clean + ./spring-cloud-dataflow-package/set-package-version.sh + jfrog rt mvn install -DskipTests -B jfrog rt build-publish - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-ndv >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-2-11-x-ndv >> $GITHUB_ENV echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV # zoo commit - name: Commit Next Dev Changes uses: jvalkeal/build-zoo-handler@v0.0.4 with: - commit-changes-branch: main + commit-changes-branch: 2.11.x commit-changes-message: Next development version # zoo success diff --git a/.github/workflows/promote-milestone.yml b/.github/workflows/promote-milestone.yml index 98ea49aea9..506ba2f317 100644 --- a/.github/workflows/promote-milestone.yml +++ b/.github/workflows/promote-milestone.yml @@ -13,34 +13,19 @@ jobs: environment: promote steps: # need repo to push release branch and a tag - - uses: actions/checkout@v2 - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + - uses: actions/checkout@v4 + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: 
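All of the version bumps in these workers rely on the same trick: `build-helper:parse-version` decomposes the current version into `parsedVersion.*` properties inside the Maven session, and `versions:set`, run in the same invocation, consumes them, so no version string is hard-coded. Starting from, say, `2.11.1-SNAPSHOT` (single quotes keep the placeholders away from the shell so Maven resolves them):

```bash
# Release form: 2.11.1-SNAPSHOT -> 2.11.1
mvn build-helper:parse-version versions:set \
  -DprocessAllModules=true -DgenerateBackupPoms=false \
  -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}' -B
# Next-dev form: 2.11.1-SNAPSHOT -> 2.11.2-SNAPSHOT
mvn build-helper:parse-version versions:set \
  -DprocessAllModules=true -DgenerateBackupPoms=false \
  -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.nextIncrementalVersion}-SNAPSHOT' -B
```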
${{ secrets.JF_ARTIFACTORY_SPRING }} + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} # zoo extract and ensure - name: Extract Zoo Context Properties uses: jvalkeal/build-zoo-handler@v0.0.4 with: dispatch-handler-extract-context-properties: true ensure-env: | - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildname - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildname - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildnumber BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildname - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_skipper_buildname - BUILD_ZOO_HANDLER_spring_cloud_skipper_buildnumber BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname @@ -48,14 +33,7 @@ jobs: # promoting build from staging repo into milestone - name: Promote Build run: | - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildnumber libs-milestone-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildnumber libs-milestone-local jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber libs-milestone-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildnumber libs-milestone-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildnumber libs-milestone-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildnumber libs-milestone-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildname $BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildnumber libs-milestone-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_skipper_buildname $BUILD_ZOO_HANDLER_spring_cloud_skipper_buildnumber libs-milestone-local jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber libs-milestone-local jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber libs-milestone-local diff --git a/.github/workflows/promote-release.yml b/.github/workflows/promote-release.yml index b3eaa3d788..5b46a7b461 100644 --- a/.github/workflows/promote-release.yml +++ b/.github/workflows/promote-release.yml @@ -13,34 +13,19 @@ jobs: environment: promote steps: # need repo to push release branch and a tag - - uses: actions/checkout@v2 - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 + - uses: 
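With the train reduced to three builds, promotion collapses to three `build-promote` calls. Each call takes a build-info name and number and moves (by default; there is a `--copy` flag) everything published under that build into the target repository, so a staged build is promoted as a unit rather than file by file:

```bash
# Promote one staged build into libs-milestone-local; the two variables are
# the build-info coordinates exported earlier by the worker jobs.
jfrog rt build-promote \
  "$BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname" \
  "$BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber" \
  libs-milestone-local
```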
actions/checkout@v4 + - uses: jfrog/setup-jfrog-cli@v3 env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} # zoo extract and ensure - name: Extract Zoo Context Properties uses: jvalkeal/build-zoo-handler@v0.0.4 with: dispatch-handler-extract-context-properties: true ensure-env: | - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildname - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildname - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildnumber BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildname - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildname - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildnumber - BUILD_ZOO_HANDLER_spring_cloud_skipper_buildname - BUILD_ZOO_HANDLER_spring_cloud_skipper_buildnumber BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname @@ -48,14 +33,7 @@ jobs: # promoting build from staging repo into release - name: Promote Build run: | - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_buildnumber libs-release-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_buildnumber libs-release-local jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber libs-release-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_local_buildnumber libs-release-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_buildnumber libs-release-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_buildnumber libs-release-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildname $BUILD_ZOO_HANDLER_spring_cloud_common_security_config_buildnumber libs-release-local - jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_skipper_buildname $BUILD_ZOO_HANDLER_spring_cloud_skipper_buildnumber libs-release-local jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber libs-release-local jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber libs-release-local diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 0000000000..3bf3c6dc78 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,93 @@ +name: Publish Documentation + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to publish' + required: true + +jobs: + build: + 
runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- + +# target deploy repos + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-release-staging \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-staging-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x-release >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + + - name: 'Install: xmllint' + uses: ./.github/actions/install-xmlutils +# build and publish to configured target + - name: Build and Publish + run: | + jfrog rt mvn build-helper:parse-version versions:set \ + -gs .github/settings.xml \ + -DskipResolution=true \ + -DprocessAllModules=true \ + -DgenerateBackupPoms=false \ + -Dartifactory.publish.artifacts=false \ + -DnewVersion=${{ inputs.version }} \ + -B + jfrog rt mvn versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=dataflow.version \ + -DnewVersion=${{ inputs.version }} \ + -B + jfrog rt mvn versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=skipper.version \ + -DnewVersion=${{ inputs.version }} \ + -B + spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) + echo "spring_cloud_dataflow_version=$spring_cloud_dataflow_version" >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${spring_cloud_dataflow_version}" + echo "::notice ::build-clean" + jfrog rt build-clean + echo "::notice ::set-package-version" + ./spring-cloud-dataflow-package/set-package-version.sh + echo "::notice ::install" + mvn -am -pl :spring-cloud-skipper-server-core install -DskipTests -T 1C + jfrog rt mvn -gs .github/settings.xml \ + -Pstagingrelease,full,deploymentfiles,docs \ + -pl :spring-cloud-dataflow-classic-docs,:spring-cloud-dataflow-docs,:spring-cloud-skipper-docs \ + -B install -DskipTests + echo "::notice ::build-publish" + jfrog rt build-publish + echo "::info ::spring_cloud_dataflow_version=$spring_cloud_dataflow_version" + echo "::info ::spring_cloud_dataflow_buildname=$JFROG_CLI_BUILD_NAME" + echo "::info ::spring_cloud_dataflow_buildnumber=$JFROG_CLI_BUILD_NUMBER" diff --git a/.github/workflows/release-controller.yml b/.github/workflows/release-controller.yml index 58fe424431..d3837b0556 100644 --- a/.github/workflows/release-controller.yml +++ b/.github/workflows/release-controller.yml @@ -14,171 +14,111 @@ jobs: runs-on: ubuntu-latest steps: - # initial is when user starts workflow from UI(context is empty) - # then train build goes through via repos using defined hander rules - - name: Handle Zoo Control - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-max: 20 - dispatch-handler-config: > - [ 
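The docs build in publish-docs above leans on Maven reactor selection: `-pl` picks modules by `:artifactId`, `-am` (also-make) additionally builds whatever the selected modules depend on, and `-T 1C` runs one build thread per core. Reduced to the two essential commands (profiles and settings omitted for brevity):

```bash
# Build skipper-server-core plus its upstream dependencies first...
mvn -am -pl :spring-cloud-skipper-server-core install -DskipTests -T 1C
# ...then build just the three documentation modules against those artifacts.
mvn -pl :spring-cloud-dataflow-classic-docs,:spring-cloud-dataflow-docs,:spring-cloud-skipper-docs \
  install -DskipTests
```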
- { - "if": "initial == true", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-build", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-build' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-common", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-common' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-local", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-local' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-cloudfoundry", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-cloudfoundry' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-kubernetes", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-kubernetes' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-common-security-config", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-common-security-config' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-skipper", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-skipper' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-ui", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow", - "ref": "main", - "workflow": "release-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow", - "ref": "main", - "workflow": "promote-release.yml" - } - }, - { - "if": "data.event == 'promote-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow", - 
"ref": "main", - "workflow": "central-release.yml" - } - }, - { - "if": "data.event == 'central-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow", - "ref": "main", - "workflow": "github-release.yml" - } - }, - { - "if": "data.event == 'promotion-failed'", - "action": "fail", - "fail": { - "message": "artifactory promotion failed" - } - }, - { - "if": "data.event == 'central-failed'", - "action": "fail", - "fail": { - "message": "central sync failed" - } - }, - { - "if": "data.event == 'ghrelease-failed'", - "action": "fail", - "fail": { - "message": "github release failed" - } - }, - { - "if": "data.event == 'build-failed'", - "action": "fail", - "fail": { - "message": "something went wrong in build train" - } - } - ] + # initial is when user starts workflow from UI(context is empty) + # then train build goes through via repos using defined hander rules + - name: Handle Zoo Control + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-max: 20 + dispatch-handler-config: > + [ + { + "if": "initial == true", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-deployer", + "ref": "2.9.x", + "workflow": "release-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow-ui", + "ref": "3.4.x", + "workflow": "release-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", + "workflow": "release-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", + "workflow": "carvel-worker.yml" + } + }, + { + "if": "data.event == 'carvel-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", + "workflow": "promote-release.yml" + } + }, + { + "if": "data.event == 'promote-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", + "workflow": "central-release.yml" + } + }, + { + "if": "data.event == 'central-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "2.11.x", + "workflow": "github-release.yml" + } + }, + { + "if": "data.event == 'promotion-failed'", + "action": "fail", + "fail": { + "message": "artifactory promotion failed" + } + }, + { + "if": "data.event == 'central-failed'", + "action": "fail", + "fail": { + "message": "central sync failed" + } + }, + { + "if": "data.event == 'ghrelease-failed'", + "action": 
"fail", + "fail": { + "message": "github release failed" + } + }, + { + "if": "data.event == 'build-failed'", + "action": "fail", + "fail": { + "message": "something went wrong in build train" + } + } + ] diff --git a/.github/workflows/release-worker.yml b/.github/workflows/release-worker.yml index 19a5bcedaa..45b6429229 100644 --- a/.github/workflows/release-worker.yml +++ b/.github/workflows/release-worker.yml @@ -7,181 +7,190 @@ on: description: 'Build Zoo Handler Payload' required: true +env: + MAVEN_THREADS: '-T 1' + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 - with: - java-version: 1.8 - - uses: jvalkeal/setup-maven@v1 - with: - maven-version: 3.6.3 - - uses: jfrog/setup-jfrog-cli@v1 - with: - version: 1.46.4 - env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - - name: Install pack - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - pack-version: 0.18.0 - - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-m2- - - # target deploy repos - - name: Configure JFrog Cli - run: | - jfrog rt mvnc \ - --server-id-deploy=repo.spring.io \ - --repo-deploy-releases=libs-staging-local \ - --repo-deploy-snapshots=libs-staging-local - echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-release >> $GITHUB_ENV - echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - - # zoo extract and ensure - - name: Extract Zoo Context Properties - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-extract-context-properties: true - ensure-env: | - BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version - BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version - BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version - BUILD_ZOO_HANDLER_spring_cloud_skipper_version - BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '8' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- - # build and publish to configured target - - name: Build and Publish - run: | - jfrog rt mvn build-helper:parse-version versions:set \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DprocessAllModules=true \ - -DgenerateBackupPoms=false \ - -Dartifactory.publish.artifacts=false \ - -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}' \ - -B - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV - jfrog rt mvn versions:update-parent \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -DparentVersion='['${BUILD_ZOO_HANDLER_spring_cloud_dataflow_build_version}']' \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - 
-DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-dataflow-common.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_common_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-local.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_local_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-cloudfoundry.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_cloudfoundry_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-deployer-kubernetes.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_kubernetes_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-common-security-config.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_common_security_config_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-skipper.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_skipper_version} \ - -B - jfrog rt mvn versions:set-property \ - -gs .github/settings.xml \ - -Pstagingrelease \ - -DgenerateBackupPoms=false \ - -Dproperty=spring-cloud-dataflow-ui.version \ - -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ - -B - jfrog rt build-clean - jfrog rt mvn clean install \ - -gs .github/settings.xml \ - -Pstagingrelease,full,deploymentfiles \ - -DskipTests -U -B - jfrog rt build-publish - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-release >> $GITHUB_ENV - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV +# target deploy repos + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-release-staging \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-staging-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-2-11-x-release >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - # zoo tag - - name: Tag Release - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - tag-release-branch: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} - tag-release-tag: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} - tag-release-tag-prefix: v +# zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version + - name: 'Install: xmllint' + uses: ./.github/actions/install-xmlutils +# build and publish to configured target + - name: Build and Publish + run: | + jfrog rt mvn build-helper:parse-version 
versions:set \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DprocessAllModules=true \ + -DgenerateBackupPoms=false \ + -Dartifactory.publish.artifacts=false \ + -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}' \ + -B + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version}" + jfrog rt mvn versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=dataflow.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} \ + -B + echo "::notice ::set spring-cloud-deployer.version=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version}" + jfrog rt mvn versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-deployer.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ + -B + echo "::notice ::set spring-cloud-dataflow-ui.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version}" + jfrog rt mvn versions:set-property \ + -gs .github/settings.xml \ + -DskipResolution=true \ + -Pstagingrelease \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-dataflow-ui.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ + -B + echo "::notice ::build-clean" + jfrog rt build-clean + echo "::notice ::set-package-version" + ./spring-cloud-dataflow-package/set-package-version.sh + echo "::notice ::install" + jfrog rt mvn -gs .github/settings.xml -Pstagingrelease,full,deploymentfiles,docs -B install -DskipTests + echo "::notice ::build-publish" + jfrog rt build-publish + PROJECT_VERSION=$(mvn -gs .github/settings.xml -Pstagingrelease help:evaluate -Dexpression=project.version -q -DforceStdout) + spring_cloud_dataflow_version=$PROJECT_VERSION + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$spring_cloud_dataflow_version >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-2-11-x-release >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_skipper_version=$spring_cloud_dataflow_version >> $GITHUB_ENV + + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$spring_cloud_dataflow_version" + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-2-11-x-release" + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER" + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_skipper_version=$spring_cloud_dataflow_version" - # build and publish images via composite action - - name: Build and Publish Images - uses: ./.github/actions/build-images - with: - version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} - dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} - dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }} + # set +e + # echo "::info ::Project version=$PROJECT_VERSION" + # SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-staging-local) + # if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then + # echo "::error ::Skipper Docs 
URL=$SKIPPER_DOCS_PATTERN" + # else + # echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" + # jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" + # echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" + # fi +# zoo tag + - name: Tag Release + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + tag-release-branch: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + tag-release-tag: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + tag-release-tag-prefix: v +# clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + outputs: + version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }} + BUILD_ZOO_HANDLER_spring_cloud_skipper_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }} + images: + name: Build and Publish Images + needs: + - build + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.build.outputs.version }} + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + GCR_JSON_KEY: ${{ secrets.GCR_JSON_KEY }} + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + wrap: + needs: [ build, images ] + runs-on: ubuntu-latest + steps: + - name: Save env + shell: bash + if: ${{ success() }} + run: | + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_skipper_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }}" >> $GITHUB_ENV - # zoo success - - name: Notify Build Success Zoo Handler Controller - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "build-succeed" - } +# zoo success + - name: Notify Build Success Zoo Handler Controller + if: ${{ success() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-succeed" + } - # zoo failure - - name: Notify Build Failure Zoo Handler Controller - if: ${{ failure() }} - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "build-failed", - "message": "spring-cloud-dataflow failed" - } - # clean m2 cache - - name: Clean cache - run: | - find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr +# zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + 
dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-failed", + "message": "spring-cloud-dataflow failed" + } diff --git a/.github/workflows/schedule-train-builds.yml b/.github/workflows/schedule-train-builds.yml new file mode 100644 index 0000000000..74053e2233 --- /dev/null +++ b/.github/workflows/schedule-train-builds.yml @@ -0,0 +1,19 @@ +name: Schedule Snapshot Train Builds + +on: + workflow_dispatch: + schedule: + - cron: '0 2 * * 1,2,3,4,5' + +jobs: + schedule: + runs-on: ubuntu-latest + strategy: + matrix: + branch: [2.11.x] + steps: + - uses: benc-uk/workflow-dispatch@v1 + with: + workflow: Build Snapshot Controller + token: ${{ secrets.SCDF_ACCESS_TOKEN }} + ref: ${{ matrix.branch }} diff --git a/.github/workflows/skipper-docs-name.sh b/.github/workflows/skipper-docs-name.sh new file mode 100755 index 0000000000..ef030d56d7 --- /dev/null +++ b/.github/workflows/skipper-docs-name.sh @@ -0,0 +1,37 @@ +#!/bin/bash +VERSION=$1 +if [ "$1" == "" ]; then + echo "Version is required" + exit 1 +fi +if [ "$2" != "" ]; then + REPO="$2" +fi + +if [ -z "$REPO" ]; then + if [[ "$VERSION" == *"-SNAPSHOT"* ]]; then + REPO="libs-snapshot-local" + elif [[ "$VERSION" == *"-M"* ]] || [[ "${VERSION}" == *"-RC"* ]]; then + REPO="libs-milestone-local" + else + REPO="libs-release-local" + fi +fi +CURL_TOKEN="$ARTIFACTORY_USERNAME:$ARTIFACTORY_PASSWORD" +if [[ "$REPO" == *"snapshot"* ]]; then + META_DATA_URL="https://repo.spring.io/artifactory/$REPO/org/springframework/cloud/spring-cloud-skipper-docs/${VERSION}/maven-metadata.xml" + curl -u "$CURL_TOKEN" --basic -o maven-metadata.xml -s -XGET -L "$META_DATA_URL" # > /dev/null + DL_TS=$(xmllint --xpath "/metadata/versioning/snapshot/timestamp/text()" maven-metadata.xml | sed 's/\.//') + DL_VERSION=$(xmllint --xpath "/metadata/versioning/snapshotVersions/snapshotVersion[extension/text() = 'pom' and updated/text() = '$DL_TS']/value/text()" maven-metadata.xml) + REMOTE_PATH="org/springframework/cloud/spring-cloud-skipper-docs/${VERSION}/spring-cloud-skipper-docs-${DL_VERSION}.zip" +else + REMOTE_PATH="org/springframework/cloud/spring-cloud-skipper-docs/${VERSION}/spring-cloud-skipper-docs-${VERSION}.zip" +fi +REMOTE_FILE="https://repo.spring.io/artifactory/${REPO}/$REMOTE_PATH" +RC=$(curl -u "$CURL_TOKEN" --basic -o /dev/null -L -s -Iw '%{http_code}' "$REMOTE_FILE") +if ((RC<300)); then + echo "$REMOTE_PATH" +else + echo "$REMOTE_FILE does not exist. 
Error code $RC" + exit 2 +fi diff --git a/.gitignore b/.gitignore index 3efcf66b65..918ed9330e 100644 --- a/.gitignore +++ b/.gitignore @@ -52,3 +52,17 @@ cleanup.sh # Carvel node_modules +!/.idea/checkstyle-idea.xml +yagni/ +workspace/ +*.sh.txt +*.yml.txt +/src/deploy/shell/*.jar +*.log +*.shell +/*.json +*.tar +/srp +/srp*gz +/observer/* + diff --git a/.jdk8 b/.jdk8 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index 5bb39dc257..3be380875a 100644 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -1,2 +1 @@ -distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip - +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip diff --git a/.run/spring-cloud-dataflow [clean,install].run.xml b/.run/spring-cloud-dataflow [clean,install].run.xml new file mode 100644 index 0000000000..83b1bded63 --- /dev/null +++ b/.run/spring-cloud-dataflow [clean,install].run.xml @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.sdkmanrc b/.sdkmanrc new file mode 100644 index 0000000000..93565edd75 --- /dev/null +++ b/.sdkmanrc @@ -0,0 +1,21 @@ +# +# Copyright 2005-2022 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Use `sdk env` to manually apply this file. +# Set `sdkman_auto_env=true` in $HOME/.sdkman/etc/config to make it automatic. +# +# NOTE: Switching branches will NOT trigger a change. Only switching folder will do it. Use `sdk env` to apply when simply switching branches. 
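+#
+# Illustrative usage (an example only; commands are standard SDKMAN!):
+#   sdk env install   # installs the JDK pinned below if it is not present
+#   sdk env           # switches the current shell to that JDK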
+ +java=8.0.402-librca diff --git a/.settings.xml b/.settings.xml index 6066f6436c..2cd7c001a7 100644 --- a/.settings.xml +++ b/.settings.xml @@ -3,27 +3,44 @@ repo.spring.io - ${env.CI_DEPLOY_USERNAME} - ${env.CI_DEPLOY_PASSWORD} + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} + + + spring-snapshots + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} + + + spring-milestones + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} + + + spring-staging + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} - spring true + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + spring-snapshots Spring Snapshots - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot true @@ -31,25 +48,25 @@ spring-milestones Spring Milestones - https://repo.spring.io/libs-milestone-local + https://repo.spring.io/milestone false - - spring-releases - Spring Releases - https://repo.spring.io/release + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 false - - - + spring-snapshots Spring Snapshots - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot true @@ -57,7 +74,7 @@ spring-milestones Spring Milestones - https://repo.spring.io/libs-milestone-local + https://repo.spring.io/milestone false @@ -65,4 +82,5 @@ + diff --git a/.springjavaformatconfig b/.springjavaformatconfig new file mode 100644 index 0000000000..12643781ce --- /dev/null +++ b/.springjavaformatconfig @@ -0,0 +1 @@ +java-baseline=8 \ No newline at end of file diff --git a/.trivyignore b/.trivyignore new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.vscode/launch.json b/.vscode/launch.json index 14d8a33dca..d2656754a8 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -29,6 +29,13 @@ "mainClass": "org.springframework.cloud.dataflow.server.single.DataFlowServerApplication", "projectName": "spring-cloud-dataflow-server", "args": "--spring.config.additional-location=src/config/scdf-mysql.yml" + }, + { + "type": "java", + "name": "SCDF Debug Attach", + "request": "attach", + "hostName": "localhost", + "port": 5005 } ] } \ No newline at end of file diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc new file mode 100755 index 0000000000..280d74f428 --- /dev/null +++ b/CONTRIBUTING.adoc @@ -0,0 +1,55 @@ += Contributing to Spring Cloud Dataflow + +:github: https://github.com/spring-cloud/spring-cloud-dataflow + +Spring Cloud Dataflow is released under the Apache 2.0 license. If you would like to contribute something, or want to hack on the code, this document should help you get started. + + + +== Code of Conduct +This project adheres to the Contributor Covenant link:CODE_OF_CONDUCT.adoc[code of conduct]. +By participating, you are expected to uphold this code. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. + + + +== Using GitHub Issues +We use GitHub issues to track bugs and enhancements. +If you have a general usage question, please ask on https://stackoverflow.com[Stack Overflow]. +The Spring Cloud Dataflow team and the broader community monitor the https://stackoverflow.com/tags/spring-cloud-dataflow[`spring-cloud-dataflow`] tag. + +If you are reporting a bug, please help to speed up problem diagnosis by providing as much information as possible. +Ideally, that would include a small sample project that reproduces the problem.
+ + + +== Reporting Security Vulnerabilities +If you think you have found a security vulnerability in Spring Cloud Data Flow, please *DO NOT* disclose it publicly until we've had a chance to fix it. +Please don't report security vulnerabilities using GitHub issues, instead head over to https://spring.io/security-policy and learn how to disclose them responsibly. + + + +== Sign the Contributor License Agreement +Before we accept a non-trivial patch or pull request, we will need you to https://cla.pivotal.io/sign/spring[sign the Contributor License Agreement]. +Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. +Active contributors might be asked to join the core team, and given the ability to merge pull requests. + + +=== Code Conventions and Housekeeping + +None of the following guidelines is essential for a pull request, but they all help your fellow developers understand and work with your code. +They can also be added after the original pull request but before a merge. + +* Use the Spring Framework code format conventions. If you use Eclipse, you can import formatter settings by using the `eclipse-code-formatter.xml` file from the https://github.com/spring-cloud/spring-cloud-build/blob/master/spring-cloud-dependencies-parent/eclipse-code-formatter.xml[Spring Cloud Build] project. +If you use IntelliJ, you can use the https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter Plugin] to import the same file. +* Make sure all new `.java` files have a simple Javadoc class comment with at least an `@author` tag identifying you, and preferably at least a paragraph describing the class's purpose. +* Add the ASF license header comment to all new `.java` files (to do so, copy it from existing files in the project). +* Add yourself as an `@author` to the .java files that you modify substantially (more than cosmetic changes). +* Add some Javadocs and, if you change the namespace, some XSD doc elements. +* A few unit tests would help a lot as well. Someone has to do it, and your fellow developers appreciate the effort. +* If no one else uses your branch, rebase it against the current master (or other target branch in the main project). +* When writing a commit message, follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions]. +If you fix an existing issue, add `Fixes gh-XXXX` (where XXXX is the issue number) at the end of the commit message. + + +== Working with the Code +For information on editing, building, and testing the code, see the link:{github}/wiki/Working-with-the-Code[Working with the Code] page on the project wiki. diff --git a/README.md b/README.md index 16621a73f3..bda8322737 100644 --- a/README.md +++ b/README.md @@ -4,21 +4,8 @@

-Latest Release Version badge
-Latest Snapshot Version badge
-Build Status badge

+[![Build Status - CI](https://github.com/spring-cloud/spring-cloud-dataflow/actions/workflows/ci.yml/badge.svg)](https://github.com/spring-cloud/spring-cloud-dataflow/actions/workflows/ci.yml) + *Spring Cloud Data Flow* is a microservices-based toolkit for building streaming and batch data processing pipelines in Cloud Foundry and Kubernetes. @@ -66,24 +53,13 @@ For example, if relying on Maven coordinates, an application URI would be of the connects to the Spring Cloud Data Flow Server's REST API and supports a DSL that simplifies the process of defining a stream or task and managing its lifecycle. -**Community Implementations**: There are also community maintained Spring Cloud Data Flow implementations that are currently -based on the 1.7.x series of Spring Cloud Data Flow. - - * [HashiCorp Nomad](https://github.com/donovanmuller/spring-cloud-dataflow-server-nomad) - * [OpenShift](https://github.com/donovanmuller/spring-cloud-dataflow-server-openshift) - * [Apache Mesos](https://github.com/trustedchoice/spring-cloud-dataflow-server-mesos) - -The [Apache YARN](https://github.com/spring-cloud/spring-cloud-dataflow-server-yarn) implementation has reached end-of-life -status. Let us know at [Gitter](https://gitter.im/spring-cloud/spring-cloud-dataflow) if you are interested in forking -the project to continue developing and maintaining it. - ---- ## Building Clone the repo and type - $ ./mvnw clean install + $ ./mvnw -s .settings.xml clean install Looking for more information? Follow this [link](https://github.com/spring-cloud/spring-cloud-dataflow/blob/master/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc). @@ -98,9 +74,48 @@ For more information please refer to the [Git documentation, Formatting and Whit ---- +## Running Locally w/ Oracle +By default, the Dataflow server jar does not include the Oracle database driver dependency. +If you want to use Oracle for development/testing when running locally, you can specify the `local-dev-oracle` Maven profile when building. +The following command will include the Oracle driver dependency in the jar: +``` +$ ./mvnw -s .settings.xml clean package -Plocal-dev-oracle +``` +You can follow the steps in the [Oracle on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/Oracle-on-Mac-ARM64#run-container-in-docker) Wiki to run Oracle XE locally in Docker with Dataflow pointing at it. + +> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima. + +---- + +## Running Locally w/ Microsoft SQL Server +By default, the Dataflow server jar does not include the MSSQL database driver dependency. +If you want to use MSSQL for development/testing when running locally, you can specify the `local-dev-mssql` Maven profile when building. +The following command will include the MSSQL driver dependency in the jar: +``` +$ ./mvnw -s .settings.xml clean package -Plocal-dev-mssql +``` +You can follow the steps in the [MSSQL on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/MSSQL-on-Mac-ARM64#running-dataflow-locally-against-mssql) Wiki to run MSSQL locally in Docker with Dataflow pointing at it. + +> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima. + +---- + +## Running Locally w/ IBM DB2 +By default, the Dataflow server jar does not include the DB2 database driver dependency. +If you want to use DB2 for development/testing when running locally, you can specify the `local-dev-db2` Maven profile when building.
+The following command will include the DB2 driver dependency in the jar: +``` +$ ./mvnw -s .settings.xml clean package -Plocal-dev-db2 +``` +You can follow the steps in the [DB2 on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/DB2-on-Mac-ARM64#running-dataflow-locally-against-db2) Wiki to run DB2 locally in Docker with Dataflow pointing at it. + +> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima. + +---- + ## Contributing -We welcome contributions! Follow this [link](https://github.com/spring-cloud/spring-cloud-dataflow/blob/master/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc) for more information on how to contribute. +We welcome contributions! See the [CONTRIBUTING](./CONTRIBUTING.adoc) guide for details. ---- diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..8a9410d248 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +# Security Policy +## Reporting a Vulnerability + +If you think you have found a security vulnerability, please **DO NOT** disclose it publicly until we’ve had a chance to fix it. +Please don’t report security vulnerabilities using GitHub issues, instead head over to https://spring.io/security-policy and learn how to disclose them responsibly. diff --git a/build-carvel-package.sh b/build-carvel-package.sh new file mode 100755 index 0000000000..e0a0d263f2 --- /dev/null +++ b/build-carvel-package.sh @@ -0,0 +1,59 @@ +#!/bin/bash + +function create_and_clear() { + rm -rf "$1" + mkdir -p "$1" +} + +SCDIR=$(realpath $(dirname "$(readlink -f "${BASH_SOURCE[0]}")")) +set -euxo pipefail +pushd "$SCDIR" > /dev/null +export DATAFLOW_VERSION=$(./mvnw help:evaluate -o -Dexpression=project.version -q -DforceStdout) +export SKIPPER_VERSION=$(./mvnw help:evaluate -o -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout) + +if [ "${PACKAGE_VERSION:-}" = "" ]; then + export PACKAGE_VERSION=$DATAFLOW_VERSION +fi + +# you can launch a local docker registry using docker run -d -p 5000:5000 --name registry registry:2.7 +# export REPO_PREFIX=":5000/" +readonly REPO_PREFIX="${REPO_PREFIX:-docker.io/}" + +export PACKAGE_BUNDLE_REPOSITORY="${REPO_PREFIX}springcloud/scdf-oss-package" +export REPOSITORY_BUNDLE="${REPO_PREFIX}springcloud/scdf-oss-repo" + +export SKIPPER_REPOSITORY="springcloud/spring-cloud-skipper-server" +export SERVER_REPOSITORY="springcloud/spring-cloud-dataflow-server" +export CTR_VERSION=$DATAFLOW_VERSION +export PACKAGE_NAME="scdf" +export PACKAGE_BUNDLE_TEMPLATE="src/carvel/templates/bundle/package" +export IMGPKG_LOCK_TEMPLATE="src/carvel/templates/imgpkg" +export VENDIR_SRC_IN="src/carvel/config" +export SERVER_VERSION="$DATAFLOW_VERSION" + +export PACKAGE_BUNDLE_GENERATED=/tmp/generated/packagebundle +export IMGPKG_LOCK_GENERATED_IN=/tmp/generated/imgpkgin +export IMGPKG_LOCK_GENERATED_OUT=/tmp/generated/imgpkgout +create_and_clear $PACKAGE_BUNDLE_GENERATED +create_and_clear $IMGPKG_LOCK_GENERATED_IN +create_and_clear $IMGPKG_LOCK_GENERATED_OUT + +echo "bundle-path=$PACKAGE_BUNDLE_GENERATED" +export SCDF_DIR="$SCDIR" + +sh "$SCDIR/.github/actions/build-package-bundle/build-package-bundle.sh" + +imgpkg push --bundle "$PACKAGE_BUNDLE_REPOSITORY:$PACKAGE_VERSION" --file "$PACKAGE_BUNDLE_GENERATED" + +export REPO_BUNDLE_TEMPLATE="src/carvel/templates/bundle/repo" + +export REPO_BUNDLE_RENDERED=/tmp/generated/reporendered +export REPO_BUNDLE_GENERATED=/tmp/generated/repobundle +create_and_clear $REPO_BUNDLE_RENDERED
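+# REPO_BUNDLE_RENDERED and REPO_BUNDLE_GENERATED are scratch directories that are
+# recreated on every run; build-repository-bundle.sh (invoked below) fills them, and
+# the resulting repository bundle is pushed with imgpkg.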
+create_and_clear $REPO_BUNDLE_GENERATED + +sh "$SCDIR/.github/actions/build-repository-bundle/build-repository-bundle.sh" + +imgpkg push --bundle "$REPOSITORY_BUNDLE:$PACKAGE_VERSION" --file "$REPO_BUNDLE_GENERATED" + +popd diff --git a/build-containers.sh b/build-containers.sh new file mode 100755 index 0000000000..77b9a21662 --- /dev/null +++ b/build-containers.sh @@ -0,0 +1,3 @@ +#!/bin/bash +./mvnw install -s .settings.xml -DskipTests -T 1C -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server,:spring-cloud-dataflow-composed-task-runner +./mvnw spring-boot:build-image -s .settings.xml -DskipTests -T 1C -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server,:spring-cloud-dataflow-composed-task-runner \ No newline at end of file diff --git a/lib/spring-doc-resources-0.2.5.zip b/lib/spring-doc-resources-0.2.5.zip new file mode 100644 index 0000000000..b1ff602652 Binary files /dev/null and b/lib/spring-doc-resources-0.2.5.zip differ diff --git a/models/batch4-5-simple.adoc b/models/batch4-5-simple.adoc new file mode 100644 index 0000000000..3ee7cdd389 --- /dev/null +++ b/models/batch4-5-simple.adoc @@ -0,0 +1,16 @@ += Simple solution + +* SchemaTarget selection represents a set of schema version, prefix and name. +* Boot 2 is the default; task and batch tables remain as they are currently. +* Boot 3 task and batch tables will have the same prefix, BOOT3_. +* The Data Flow server will set the prefix properties for task and batch. +* Registration will require a Schema (Boot2, Boot3) selection indicator. +* At task launch the Data Flow server will create an entry in the correct task-execution table and sequence mechanism, with the prefix given by the task's registration. +* Ability to disable Boot 3 support. The feature endpoint will include this indicator. +* The endpoints to list job and task executions will have to accept the BootVersion as a query parameter; when it is absent, it implies the default condition. `http://localhost:9393/tasks/executions{?schemaTarget}` +* When using the shell to list executions, it will be an optional parameter: `--schema-target=boot3` +* When viewing the Task Execution list or Job Execution list, there will be a drop-down with the options Default and Boot3. +* Each item in the list of executions includes links to retrieve the entity, and these will be encoded with the schemaTarget by the resource assembler. + +* The UI only needs to add the drop-downs and pass the selection into the query. +* The user will not have to do anything extra when creating composed tasks. diff --git a/pom.xml b/pom.xml index 6f13af2a7b..231aebbb60 100644 --- a/pom.xml +++ b/pom.xml @@ -2,20 +2,17 @@ 4.0.0 - spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + org.springframework.cloud + spring-cloud-dataflow + 2.11.6-SNAPSHOT + spring-cloud-dataflow + Spring Cloud Dataflow pom https://cloud.spring.io/spring-cloud-dataflow/ Pivotal Software, Inc.
https://www.spring.io - - org.springframework.cloud - spring-cloud-dataflow-build - 2.9.2-SNAPSHOT - - Apache License, Version 2.0 @@ -52,410 +49,121 @@ https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors - - 1.8 - -Xdoclint:none - - 3.2.2-SNAPSHOT - - 2.9.2-SNAPSHOT - 2.7.2-SNAPSHOT - 2.7.2-SNAPSHOT - 2.7.2-SNAPSHOT - 2.7.2-SNAPSHOT - - 2.8.2-SNAPSHOT - - 2.3.4 - - - 2.3.7.RELEASE - - 1.7.2-SNAPSHOT - - 1.2.0.RELEASE - - 0.8.5 - 3.0.2 - 2.2.0 - 1.5.5 - 0.5 - 2.11.1 - 3.0.2 - 2.10.6 - 1.11.731 - 1.15.2 - - 3.0.2 - 2.2.0 - 1.0.4 - 1.0.4 - + spring-cloud-dataflow-parent + spring-cloud-dataflow-build + spring-cloud-dataflow-common + spring-cloud-common-security-config spring-cloud-dataflow-container-registry spring-cloud-dataflow-configuration-metadata spring-cloud-dataflow-core-dsl + spring-cloud-dataflow-schema-core spring-cloud-dataflow-core - spring-cloud-dataflow-registry - spring-cloud-dataflow-rest-resource - spring-cloud-dataflow-single-step-batch-job - spring-cloud-dataflow-composed-task-runner + spring-cloud-dataflow-schema + spring-cloud-dataflow-aggregate-task spring-cloud-dataflow-server-core + spring-cloud-dataflow-rest-resource + spring-cloud-dataflow-audit + spring-cloud-dataflow-registry + spring-cloud-dataflow-platform-kubernetes + spring-cloud-dataflow-platform-cloudfoundry spring-cloud-dataflow-autoconfigure - spring-cloud-dataflow-server spring-cloud-dataflow-rest-client spring-cloud-dataflow-shell spring-cloud-dataflow-shell-core - spring-cloud-dataflow-classic-docs - spring-cloud-dataflow-docs spring-cloud-dataflow-completion - spring-cloud-dataflow-dependencies - spring-cloud-dataflow-platform-kubernetes - spring-cloud-dataflow-platform-cloudfoundry + spring-cloud-skipper spring-cloud-starter-dataflow-server spring-cloud-starter-dataflow-ui - spring-cloud-dataflow-audit - spring-cloud-dataflow-test + spring-cloud-dataflow-server spring-cloud-dataflow-tasklauncher + spring-cloud-dataflow-single-step-batch-job + spring-cloud-dataflow-composed-task-runner + spring-cloud-dataflow-test + spring-cloud-dataflow-dependencies + spring-cloud-dataflow-classic-docs + spring-cloud-dataflow-docs + spring-cloud-dataflow-package - - - - org.springframework.cloud - spring-cloud-dataflow-common-dependencies - ${spring-cloud-dataflow-common.version} - pom - import - - - org.springframework.cloud - spring-cloud-task-dependencies - ${spring-cloud-task.version} - pom - import - - - org.springframework.cloud - spring-cloud-starter-single-step-batch-job - ${spring-cloud-task.version} - - - org.springframework.cloud - spring-cloud-skipper-dependencies - ${spring-cloud-skipper.version} - pom - import - - - org.springframework.cloud - spring-cloud-dataflow-dependencies - 2.9.2-SNAPSHOT - pom - import - - - org.testcontainers - testcontainers-bom - ${testcontainers.version} - pom - import - - - org.springframework.cloud - spring-cloud-dataflow-ui - ${spring-cloud-dataflow-ui.version} - - - org.springframework.cloud - spring-cloud-deployer-spi - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-resource-support - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-resource-maven - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-resource-docker - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-local - ${spring-cloud-deployer-local.version} - - - org.springframework.cloud - spring-cloud-deployer-cloudfoundry - 
${spring-cloud-deployer-cloudfoundry.version} - - - org.springframework.shell - spring-shell - ${spring-shell.version} - - - org.springframework.cloud - spring-cloud-starter-common-security-config-web - ${spring-cloud-common-security-config.version} - - - org.springframework.cloud - spring-cloud-deployer-kubernetes - ${spring-cloud-deployer-kubernetes.version} - - - org.apache.directory.server - apacheds-protocol-ldap - ${apache-directory-server.version} - - - io.codearte.props2yaml - props2yaml - ${codearte-props2yml.version} - - - org.springframework.security.oauth - spring-security-oauth2 - ${spring-security-oauth2.version} - - - net.javacrumbs.json-unit - json-unit-assertj - ${json-unit.version} - - - com.google.code.findbugs - jsr305 - ${findbugs.version} - - - joda-time - joda-time - ${joda-time.version} - - - com.amazonaws - aws-java-sdk-ecr - ${aws-java-sdk-ecr.version} - - - - com.wavefront - wavefront-spring-boot-bom - ${wavefront-spring-boot-bom.version} - pom - import - - - org.springframework.cloud.stream.app - stream-applications-micrometer-common - ${stream-applications.version} - - - org.springframework.cloud.stream.app - stream-applications-security-common - ${stream-applications.version} - - - org.springframework.cloud.stream.app - stream-applications-postprocessor-common - ${stream-applications.version} - - - org.springframework.cloud - spring-cloud-deployer-dependencies - ${spring-cloud-deployer.version} - pom - import - - - + + + org.codehaus.groovy + groovy-eclipse-batch + 3.0.8-01 + test + + + org.junit.jupiter + junit-jupiter-api + + + + + org.codehaus.groovy + groovy-all + 3.0.19 + pom + test + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.platform + junit-platform-launcher + + + org.junit.jupiter + junit-jupiter-engine + + + + + org.apache.maven.plugins - maven-surefire-plugin - 2.22.1 + maven-compiler-plugin + 3.11.0 - - **/*Tests.java - **/*Test.java - - - **/Abstract*.java - - - ${argLine} + 1.8 + 1.8 - org.jacoco - jacoco-maven-plugin + org.codehaus.gmaven + groovy-maven-plugin + 2.1.1 + + + org.codehaus.groovy + groovy-eclipse-batch + 3.0.8-01 + + + org.codehaus.groovy + groovy-all + 3.0.19 + pom + + - agent - - prepare-agent - - - - report - test + validate - report + execute + + + ${project.basedir} + + ${project.basedir}/src/test/groovy/check-pom.groovy + - - org.apache.maven.plugins - maven-checkstyle-plugin - - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - - org.sonarsource.scanner.maven - sonar-maven-plugin - ${sonar-maven-plugin.version} - - - org.jacoco - jacoco-maven-plugin - ${jacoco-maven-plugin.version} - - - org.apache.maven.plugins - maven-jar-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-source-plugin - 3.0.1 - - - org.springframework.cloud - spring-cloud-dataflow-apps-docs-plugin - ${spring-cloud-dataflow-apps-docs-plugin.version} - - - generate-documentation - verify - - generate-documentation - - - - - - org.springframework.cloud - spring-cloud-dataflow-apps-metadata-plugin - ${spring-cloud-dataflow-apps-metadata-plugin.version} - - - - - - - org.apache.maven.plugins - maven-jxr-plugin - 2.5 - - - - - - deploymentfiles - - - - maven-resources-plugin - - - replace-deployment-files - process-resources - - copy-resources - - - true - ${basedir}/src - - - ${basedir}/src/templates - - **/* - - true - - - - - - - - - - - spring - - - spring-snapshots - Spring Snapshots - https://repo.spring.io/libs-snapshot - - true - - - - spring-milestones - Spring Milestones - 
https://repo.spring.io/libs-milestone-local - - false - - - - spring-releases - Spring Releases - https://repo.spring.io/release - - false - - - - - - spring-snapshots - Spring Snapshots - https://repo.spring.io/libs-snapshot-local - - true - - - - spring-milestones - Spring Milestones - https://repo.spring.io/libs-milestone-local - - false - - - - - diff --git a/spring-cloud-common-security-config/README.md b/spring-cloud-common-security-config/README.md new file mode 100644 index 0000000000..5466106ed9 --- /dev/null +++ b/spring-cloud-common-security-config/README.md @@ -0,0 +1,3 @@ +# Spring Cloud Common Security + +This repo holds the security configuration classes that are common across Spring Cloud (Spring Cloud Data Flow/Skipper for now) projects that use **Role** based authentication/authorization for their runtime server application(s). diff --git a/spring-cloud-common-security-config/pom.xml b/spring-cloud-common-security-config/pom.xml new file mode 100644 index 0000000000..a2bfe1f9e6 --- /dev/null +++ b/spring-cloud-common-security-config/pom.xml @@ -0,0 +1,23 @@ + + + 4.0.0 + spring-cloud-common-security-config + 2.11.6-SNAPSHOT + pom + spring-cloud-common-security-config + Spring Cloud Common Security Config + + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent + + + + spring-cloud-common-security-config-core + spring-cloud-common-security-config-web + spring-cloud-starter-common-security-config-web + + + diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml new file mode 100644 index 0000000000..930e4260ec --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml @@ -0,0 +1,69 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-common-security-config + 2.11.6-SNAPSHOT + + spring-cloud-common-security-config-core + spring-cloud-common-security-config-core + Spring Cloud Common Security Config Core + jar + + true + 3.4.1 + + + + org.springframework.security + spring-security-oauth2-client + + + org.springframework.boot + spring-boot-starter-test + test + + + javax.servlet + javax.servlet-api + provided + + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + -Xdoclint:none + + + + javadoc + + jar + + package + + + + + + diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java new file mode 100644 index 0000000000..33bce77f53 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java @@ -0,0 +1,74 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.core.support; + +import java.io.IOException; + +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpRequest; +import org.springframework.http.client.ClientHttpRequestExecution; +import org.springframework.http.client.ClientHttpRequestInterceptor; +import org.springframework.http.client.ClientHttpResponse; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.util.Assert; + +/** + * This implementation of a {@link ClientHttpRequestInterceptor} will retrieve, if available, the OAuth2 Access Token + * and add it to the {@code Authorization} HTTP header. + * + * @author Gunnar Hillert + */ +public class OAuth2AccessTokenProvidingClientHttpRequestInterceptor implements ClientHttpRequestInterceptor { + + private final String staticOauthAccessToken; + + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + public OAuth2AccessTokenProvidingClientHttpRequestInterceptor(String staticOauthAccessToken) { + super(); + Assert.hasText(staticOauthAccessToken, "staticOauthAccessToken must not be null or empty."); + this.staticOauthAccessToken = staticOauthAccessToken; + this.oauth2TokenUtilsService = null; + } + + public OAuth2AccessTokenProvidingClientHttpRequestInterceptor(OAuth2TokenUtilsService oauth2TokenUtilsService) { + super(); + this.oauth2TokenUtilsService = oauth2TokenUtilsService; + this.staticOauthAccessToken = null; + } + + @Override + public ClientHttpResponse intercept(HttpRequest request, byte[] body, ClientHttpRequestExecution execution) + throws IOException { + + final String tokenToUse; + + if (this.staticOauthAccessToken != null) { + tokenToUse = this.staticOauthAccessToken; + } + else if (this.oauth2TokenUtilsService != null){ + tokenToUse = this.oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + } + else { + tokenToUse = null; + } + + if (tokenToUse != null) { + request.getHeaders().add(HttpHeaders.AUTHORIZATION, OAuth2AccessToken.TokenType.BEARER.getValue() + " " + tokenToUse); + } + return execution.execute(request, body); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java new file mode 100644 index 0000000000..f03ba97f8a --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.core.support; + +import org.springframework.security.core.Authentication; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; + +/** + * Service providing OAuth2 Security-related utility methods that may + * require other Spring Security services. + * + * @author Gunnar Hillert + * @author Corneil du Plessis + * + */ +public interface OAuth2TokenUtilsService { + + /** + * Retrieves the access token from the {@link Authentication} implementation. + * + * @return the access token of the authenticated user; should never return null. + */ + String getAccessTokenOfAuthenticatedUser(); + + /** + * + * @param auth2AuthenticationToken the authentication token of the current user + * @return the {@link OAuth2AuthorizedClient} for the token. + */ + OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken auth2AuthenticationToken); + + /** + * + * @param auth2AuthorizedClient the authorized client to remove + */ + void removeAuthorizedClient(OAuth2AuthorizedClient auth2AuthorizedClient); + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java new file mode 100644 index 0000000000..16456705fa --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java @@ -0,0 +1,108 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.common.security.core.support; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpRequest; +import org.springframework.http.client.ClientHttpRequestExecution; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * + * @author Gunnar Hillert + * @author Corneil du Plessis + */ +class OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests { + + @Test + void testOAuth2AccessTokenProvidingClientHttpRequestInterceptorWithEmptyConstructor() { + try { + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(""); + } + catch (IllegalArgumentException e) { + assertEquals("staticOauthAccessToken must not be null or empty.", e.getMessage()); + return; + } + fail("Expected an IllegalArgumentException to be thrown."); + } + + @Test + void testOAuth2AccessTokenProvidingClientHttpRequestInterceptorWithStaticTokenConstructor() { + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar"); + + final String accessToken = (String) ReflectionTestUtils.getField(interceptor, "staticOauthAccessToken"); + assertEquals("foobar", accessToken); + } + + @Test + void testInterceptWithStaticToken() throws IOException { + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar"); + final HttpHeaders headers = setupTest(interceptor); + + assertEquals(1, headers.size()); + assertEquals("Bearer foobar", headers.get("Authorization").get(0)); + } + + @Test + void testInterceptWithAuthentication() throws IOException { + final OAuth2TokenUtilsService oauth2TokenUtilsService = mock(OAuth2TokenUtilsService.class); + when(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).thenReturn("foo-bar-123-token"); + + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(oauth2TokenUtilsService); + final HttpHeaders headers = setupTest(interceptor); + + assertEquals(1, headers.size()); + assertEquals("Bearer foo-bar-123-token", headers.get("Authorization").get(0)); + } + + @Test + void testInterceptWithAuthenticationAndStaticToken() throws IOException { + final OAuth2TokenUtilsService oauth2TokenUtilsService = mock(OAuth2TokenUtilsService.class); + when(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).thenReturn("foo-bar-123-token"); + + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar"); + final HttpHeaders headers = setupTest(interceptor); + + assertEquals(1, headers.size()); + assertEquals("Bearer foobar", headers.get("Authorization").get(0)); + } + + private HttpHeaders setupTest(OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor) throws IOException { + final HttpRequest request = Mockito.mock(HttpRequest.class); + final ClientHttpRequestExecution clientHttpRequestExecution = Mockito.mock(ClientHttpRequestExecution.class); + final HttpHeaders headers = new HttpHeaders(); +
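+		// The mocked request hands back this mutable HttpHeaders instance, so the
+		// Authorization header written by intercept() can be asserted afterwards.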
when(request.getHeaders()).thenReturn(headers); + interceptor.intercept(request, null, clientHttpRequestExecution); + verify(clientHttpRequestExecution, Mockito.times(1)).execute(request, null); + return headers; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml new file mode 100644 index 0000000000..edcb6f2d9e --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml @@ -0,0 +1,126 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-common-security-config + 2.11.6-SNAPSHOT + + spring-cloud-common-security-config-web + spring-cloud-common-security-config-web + Spring Cloud Common Security Config Web + jar + + true + 5.0.0-alpha.14 + 3.4.1 + + + + org.springframework.cloud + spring-cloud-common-security-config-core + ${project.version} + + + org.springframework.security + spring-security-oauth2-jose + + + org.springframework.security + spring-security-oauth2-resource-server + + + org.springframework + spring-webflux + + + io.projectreactor.netty + reactor-netty + + + org.springframework.boot + spring-boot-starter-security + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.session + spring-session-core + + + org.springframework.boot + spring-boot-starter-actuator + + + org.springframework.boot + spring-boot-starter-test + test + + + com.squareup.okhttp3 + mockwebserver3-junit5 + ${okhttp3.version} + test + + + com.squareup.okhttp3 + okhttp + ${okhttp3.version} + test + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + 1.8.22 + test + + + org.jetbrains.kotlin + kotlin-stdlib + 1.8.22 + test + + + javax.validation + validation-api + + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + -Xdoclint:none + + + + javadoc + + jar + + package + + + + + + diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java new file mode 100644 index 0000000000..8efea5f00e --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java @@ -0,0 +1,142 @@ +/* + * Copyright 2016-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Holds configuration for the authorization aspects of security. 
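+ *
+ * <p>Illustrative rule entries (format assumed from Spring Cloud Data Flow's
+ * documented authorization rules; the binding prefix is supplied by the consuming
+ * application):
+ * <pre>
+ * rules:
+ *   - GET /about =&gt; hasRole('ROLE_VIEW')
+ *   - POST /tasks/definitions =&gt; hasRole('ROLE_CREATE')
+ * </pre>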
+ * + * @author Eric Bottard + * @author Gunnar Hillert + * @author Ilayaperumal Gopinathan + * @author Mike Heath + */ +public class AuthorizationProperties { + + private String externalAuthoritiesUrl; + + private List<String> rules = new ArrayList<>(); + + private String dashboardUrl = "/dashboard"; + + private String loginUrl = "/#/login"; + + private String loginProcessingUrl = "/login"; + + private String logoutUrl = "/logout"; + + private String logoutSuccessUrl = "/logout-success.html"; + + private List<String> permitAllPaths = new ArrayList<>(); + + private List<String> authenticatedPaths = new ArrayList<>(); + + /** + * Role-mapping configuration per OAuth2 provider. + */ + private final Map<String, ProviderRoleMapping> providerRoleMappings = new HashMap<>(); + + private String defaultProviderId; + + public Map<String, ProviderRoleMapping> getProviderRoleMappings() { + return providerRoleMappings; + } + + public List<String> getRules() { + return rules; + } + + public void setRules(List<String> rules) { + this.rules = rules; + } + + public String getExternalAuthoritiesUrl() { + return externalAuthoritiesUrl; + } + + public void setExternalAuthoritiesUrl(String externalAuthoritiesUrl) { + this.externalAuthoritiesUrl = externalAuthoritiesUrl; + } + + public String getDashboardUrl() { + return dashboardUrl; + } + + public void setDashboardUrl(String dashboardUrl) { + this.dashboardUrl = dashboardUrl; + } + + public String getLoginUrl() { + return loginUrl; + } + + public void setLoginUrl(String loginUrl) { + this.loginUrl = loginUrl; + } + + public String getLoginProcessingUrl() { + return loginProcessingUrl; + } + + public void setLoginProcessingUrl(String loginProcessingUrl) { + this.loginProcessingUrl = loginProcessingUrl; + } + + public String getLogoutUrl() { + return logoutUrl; + } + + public void setLogoutUrl(String logoutUrl) { + this.logoutUrl = logoutUrl; + } + + public String getLogoutSuccessUrl() { + return logoutSuccessUrl; + } + + public void setLogoutSuccessUrl(String logoutSuccessUrl) { + this.logoutSuccessUrl = logoutSuccessUrl; + } + + public List<String> getPermitAllPaths() { + return permitAllPaths; + } + + public void setPermitAllPaths(List<String> permitAllPaths) { + this.permitAllPaths = permitAllPaths; + } + + public List<String> getAuthenticatedPaths() { + return authenticatedPaths; + } + + public void setAuthenticatedPaths(List<String> authenticatedPaths) { + this.authenticatedPaths = authenticatedPaths; + } + + public void setDefaultProviderId(String defaultProviderId) { + this.defaultProviderId = defaultProviderId; + } + + public String getDefaultProviderId() { + return defaultProviderId; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java new file mode 100644 index 0000000000..702e6dd2db --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; +import org.springframework.boot.autoconfigure.AutoConfigureBefore; +import org.springframework.boot.autoconfigure.security.oauth2.client.servlet.OAuth2ClientAutoConfiguration; +import org.springframework.boot.autoconfigure.security.oauth2.resource.servlet.OAuth2ResourceServerAutoConfiguration; +import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; + +@Configuration(proxyBeanMethods = false) +@AutoConfigureBefore({ + SecurityAutoConfiguration.class, + ManagementWebSecurityAutoConfiguration.class, + OAuth2ClientAutoConfiguration.class, + OAuth2ResourceServerAutoConfiguration.class}) +@Import({IgnoreAllSecurityConfiguration.class, OAuthSecurityConfiguration.class}) +public class CommonSecurityAutoConfiguration { +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java new file mode 100644 index 0000000000..ea3cd363cb --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.WebSecurityConfigurer; +import org.springframework.security.config.annotation.web.builders.WebSecurity; + +/** + * Spring Security {@link WebSecurityConfigurer} that simply ignores all paths when security is not enabled. + * + * The org.springframework.cloud.common.security.enabled=true property disables this configuration and + * falls back to the Spring Boot default security configuration.
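+ *
+ * <p>In effect, when security is disabled this class registers the equivalent of
+ * {@code web.ignoring().antMatchers("/**")} (see {@link #configure(WebSecurity)} below),
+ * so every request bypasses the security filter chain.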
+ * + * @author Janne Valkealahti + * @author Gunnar Hillert + * @author Christian Tzolov + * + */ +@Configuration +@Conditional(OnOAuth2SecurityDisabled.class) +public class IgnoreAllSecurityConfiguration implements WebSecurityConfigurer<WebSecurity> { + + @Override + public void init(WebSecurity builder) { + } + + @Override + public void configure(WebSecurity builder) { + builder.ignoring().antMatchers("/**"); + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java new file mode 100644 index 0000000000..29a09b4855 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java @@ -0,0 +1,119 @@ +/* + * Copyright 2016-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import org.slf4j.LoggerFactory; + +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.authentication.AuthenticationServiceException; +import org.springframework.security.authentication.BadCredentialsException; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.AuthenticationException; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2PasswordGrantRequest; +import org.springframework.security.oauth2.client.registration.ClientRegistration; +import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; +import org.springframework.security.oauth2.core.AuthorizationGrantType; +import org.springframework.security.oauth2.core.OAuth2AuthorizationException; +import org.springframework.security.oauth2.core.endpoint.OAuth2AccessTokenResponse; +import org.springframework.security.oauth2.server.resource.BearerTokenAuthenticationToken; +import org.springframework.security.oauth2.server.resource.authentication.OpaqueTokenAuthenticationProvider; +import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector; +import org.springframework.web.client.ResourceAccessException; + +/** + * Provides a custom {@link AuthenticationProvider} that allows for authentication + * (username and password) against an OAuth Server using a {@code password grant}.
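+ *
+ * <p>Outline of the flow implemented below: the user's credentials are exchanged at the
+ * provider's token endpoint via an {@link OAuth2PasswordGrantRequest}, and the resulting
+ * access token is then introspected by an {@link OpaqueTokenAuthenticationProvider} to
+ * build the final {@link Authentication}.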
diff --git
a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java new file mode 100644 index 0000000000..d10b25a9cd --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java @@ -0,0 +1,495 @@ +/* + * Copyright 2016-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import java.net.URI; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; +import org.springframework.boot.autoconfigure.security.SecurityProperties; +import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties; +import org.springframework.boot.autoconfigure.security.oauth2.resource.OAuth2ResourceServerProperties; +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.common.security.support.AccessTokenClearingLogoutSuccessHandler; +import org.springframework.cloud.common.security.support.AuthoritiesMapper; +import org.springframework.cloud.common.security.support.CustomAuthoritiesOpaqueTokenIntrospector; +import org.springframework.cloud.common.security.support.CustomOAuth2OidcUserService; +import org.springframework.cloud.common.security.support.CustomPlainOAuth2UserService; +import org.springframework.cloud.common.security.support.DefaultAuthoritiesMapper; +import org.springframework.cloud.common.security.support.DefaultOAuth2TokenUtilsService; +import org.springframework.cloud.common.security.support.ExternalOauth2ResourceAuthoritiesMapper; +import org.springframework.cloud.common.security.support.MappingJwtGrantedAuthoritiesConverter; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; +import org.springframework.cloud.common.security.support.SecurityConfigUtils; +import org.springframework.cloud.common.security.support.SecurityStateBean; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import 
org.springframework.context.event.EventListener; +import org.springframework.core.convert.converter.Converter; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.security.authentication.AbstractAuthenticationToken; +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.authentication.ProviderManager; +import org.springframework.security.authentication.event.AbstractAuthenticationFailureEvent; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; +import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientManager; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientProvider; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientProviderBuilder; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.endpoint.DefaultPasswordTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2PasswordGrantRequest; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest; +import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import org.springframework.security.oauth2.client.web.DefaultOAuth2AuthorizedClientManager; +import org.springframework.security.oauth2.client.web.OAuth2AuthorizedClientRepository; +import org.springframework.security.oauth2.client.web.reactive.function.client.ServletOAuth2AuthorizedClientExchangeFilterFunction; +import org.springframework.security.oauth2.core.oidc.user.OidcUser; +import org.springframework.security.oauth2.core.user.OAuth2User; +import org.springframework.security.oauth2.jwt.Jwt; +import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter; +import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector; +import org.springframework.security.web.authentication.HttpStatusEntryPoint; +import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint; +import org.springframework.security.web.authentication.logout.LogoutSuccessHandler; +import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint; +import org.springframework.security.web.authentication.www.BasicAuthenticationFilter; +import org.springframework.security.web.util.matcher.AnyRequestMatcher; +import org.springframework.security.web.util.matcher.MediaTypeRequestMatcher; +import org.springframework.security.web.util.matcher.RequestHeaderRequestMatcher; +import org.springframework.security.web.util.matcher.RequestMatcher; +import org.springframework.util.StringUtils; +import org.springframework.web.HttpMediaTypeNotAcceptableException; +import org.springframework.web.accept.HeaderContentNegotiationStrategy; +import 
org.springframework.web.context.request.NativeWebRequest; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * Setup Spring Security OAuth for the Rest Endpoints of Spring Cloud Data Flow. + * + * @author Gunnar Hillert + * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnClass(WebSecurityConfigurerAdapter.class) +@ConditionalOnMissingBean(WebSecurityConfigurerAdapter.class) +@ConditionalOnWebApplication(type = ConditionalOnWebApplication.Type.ANY) +@EnableWebSecurity +@Conditional(OnOAuth2SecurityEnabled.class) +@Import({ + OAuthSecurityConfiguration.OAuth2AccessTokenResponseClientConfig.class, + OAuthSecurityConfiguration.OAuth2AuthenticationFailureEventConfig.class, + OAuthSecurityConfiguration.OpaqueTokenIntrospectorConfig.class, + OAuthSecurityConfiguration.OidcUserServiceConfig.class, + OAuthSecurityConfiguration.PlainOauth2UserServiceConfig.class, + OAuthSecurityConfiguration.WebClientConfig.class, + OAuthSecurityConfiguration.AuthoritiesMapperConfig.class, + OAuthSecurityConfiguration.OAuth2TokenUtilsServiceConfig.class, + OAuthSecurityConfiguration.LogoutSuccessHandlerConfig.class, + OAuthSecurityConfiguration.ProviderManagerConfig.class, + OAuthSecurityConfiguration.AuthenticationProviderConfig.class +}) +public class OAuthSecurityConfiguration extends WebSecurityConfigurerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(OAuthSecurityConfiguration.class); + + @Autowired + protected OAuth2ClientProperties oauth2ClientProperties; + + @Autowired + protected SecurityStateBean securityStateBean; + + @Autowired + protected SecurityProperties securityProperties; + + @Autowired + protected ApplicationEventPublisher applicationEventPublisher; + + @Autowired + protected AuthorizationProperties authorizationProperties; + + @Autowired + protected OAuth2ResourceServerProperties oAuth2ResourceServerProperties; + + @Autowired + protected OAuth2UserService plainOauth2UserService; + + @Autowired + protected OAuth2UserService oidcUserService; + + @Autowired + protected LogoutSuccessHandler logoutSuccessHandler; + + protected OpaqueTokenIntrospector opaqueTokenIntrospector; + + protected ProviderManager providerManager; + + public AuthorizationProperties getAuthorizationProperties() { + return authorizationProperties; + } + + public void setAuthorizationProperties(AuthorizationProperties authorizationProperties) { + this.authorizationProperties = authorizationProperties; + } + + public OpaqueTokenIntrospector getOpaqueTokenIntrospector() { + return opaqueTokenIntrospector; + } + + @Autowired(required = false) + public void setOpaqueTokenIntrospector(OpaqueTokenIntrospector opaqueTokenIntrospector) { + this.opaqueTokenIntrospector = opaqueTokenIntrospector; + } + + public ProviderManager getProviderManager() { + return providerManager; + } + + @Autowired(required = false) + public void setProviderManager(ProviderManager providerManager) { + this.providerManager = providerManager; + } + + public OAuth2ResourceServerProperties getoAuth2ResourceServerProperties() { + return oAuth2ResourceServerProperties; + } + + public void setoAuth2ResourceServerProperties(OAuth2ResourceServerProperties oAuth2ResourceServerProperties) { + this.oAuth2ResourceServerProperties = oAuth2ResourceServerProperties; + } + + public SecurityStateBean getSecurityStateBean() { + return securityStateBean; + } + + public void setSecurityStateBean(SecurityStateBean securityStateBean) { + 
this.securityStateBean = securityStateBean; + } + + @Override + protected void configure(HttpSecurity http) throws Exception { + + final RequestMatcher textHtmlMatcher = new MediaTypeRequestMatcher( + new BrowserDetectingContentNegotiationStrategy(), + MediaType.TEXT_HTML); + + final BasicAuthenticationEntryPoint basicAuthenticationEntryPoint = new BasicAuthenticationEntryPoint(); + basicAuthenticationEntryPoint.setRealmName(SecurityConfigUtils.BASIC_AUTH_REALM_NAME); + basicAuthenticationEntryPoint.afterPropertiesSet(); + + if (opaqueTokenIntrospector != null) { + BasicAuthenticationFilter basicAuthenticationFilter = new BasicAuthenticationFilter( + providerManager, basicAuthenticationEntryPoint); + http.addFilter(basicAuthenticationFilter); + } + + this.authorizationProperties.getAuthenticatedPaths().add("/"); + this.authorizationProperties.getAuthenticatedPaths() + .add(dashboard(authorizationProperties, "/**")); + this.authorizationProperties.getAuthenticatedPaths() + .add(this.authorizationProperties.getDashboardUrl()); + this.authorizationProperties.getPermitAllPaths() + .add(this.authorizationProperties.getDashboardUrl()); + this.authorizationProperties.getPermitAllPaths() + .add(dashboard(authorizationProperties, "/**")); + ExpressionUrlAuthorizationConfigurer.ExpressionInterceptUrlRegistry security = + + http.authorizeRequests() + .antMatchers(this.authorizationProperties.getPermitAllPaths() + .toArray(new String[0])) + .permitAll() + .antMatchers(this.authorizationProperties.getAuthenticatedPaths() + .toArray(new String[0])) + .authenticated(); + security = SecurityConfigUtils.configureSimpleSecurity(security, this.authorizationProperties); + security.anyRequest().denyAll(); + + + http.httpBasic().and() + .logout() + .logoutSuccessHandler(logoutSuccessHandler) + .and().csrf().disable() + .exceptionHandling() + // for UI not to send basic auth header + .defaultAuthenticationEntryPointFor( + new HttpStatusEntryPoint(HttpStatus.UNAUTHORIZED), + new RequestHeaderRequestMatcher("X-Requested-With", "XMLHttpRequest")) + .defaultAuthenticationEntryPointFor( + new LoginUrlAuthenticationEntryPoint(this.authorizationProperties.getLoginProcessingUrl()), + textHtmlMatcher) + .defaultAuthenticationEntryPointFor(basicAuthenticationEntryPoint, AnyRequestMatcher.INSTANCE); + + http.oauth2Login().userInfoEndpoint() + .userService(this.plainOauth2UserService) + .oidcUserService(this.oidcUserService); + + if (opaqueTokenIntrospector != null) { + http.oauth2ResourceServer() + .opaqueToken() + .introspector(opaqueTokenIntrospector); + } + else if (oAuth2ResourceServerProperties.getJwt().getJwkSetUri() != null) { + http.oauth2ResourceServer() + .jwt() + .jwtAuthenticationConverter(grantedAuthoritiesExtractor()); + } + + this.securityStateBean.setAuthenticationEnabled(true); + } + + protected static String dashboard(AuthorizationProperties authorizationProperties, String path) { + return authorizationProperties.getDashboardUrl() + path; + } + + protected Converter grantedAuthoritiesExtractor() { + String providerId = calculateDefaultProviderId(authorizationProperties, oauth2ClientProperties); + ProviderRoleMapping providerRoleMapping = authorizationProperties.getProviderRoleMappings() + .get(providerId); + + JwtAuthenticationConverter jwtAuthenticationConverter = + new JwtAuthenticationConverter(); + + MappingJwtGrantedAuthoritiesConverter converter = new MappingJwtGrantedAuthoritiesConverter(); + converter.setAuthorityPrefix(""); + 
jwtAuthenticationConverter.setJwtGrantedAuthoritiesConverter(converter); + if (providerRoleMapping != null) { + converter.setAuthoritiesMapping(providerRoleMapping.getRoleMappings()); + converter.setGroupAuthoritiesMapping(providerRoleMapping.getGroupMappings()); + if (StringUtils.hasText(providerRoleMapping.getPrincipalClaimName())) { + jwtAuthenticationConverter.setPrincipalClaimName(providerRoleMapping.getPrincipalClaimName()); + } + } + return jwtAuthenticationConverter; + } + + private static String calculateDefaultProviderId(AuthorizationProperties authorizationProperties, OAuth2ClientProperties oauth2ClientProperties) { + if (authorizationProperties.getDefaultProviderId() != null) { + return authorizationProperties.getDefaultProviderId(); + } + else if (oauth2ClientProperties.getRegistration().size() == 1) { + return oauth2ClientProperties.getRegistration().entrySet().iterator().next() + .getKey(); + } + else if (oauth2ClientProperties.getRegistration().size() > 1 + && !StringUtils.hasText(authorizationProperties.getDefaultProviderId())) { + throw new IllegalStateException("defaultProviderId must be set if more than 1 Registration is provided."); + } + else { + throw new IllegalStateException("Unable to retrieve default provider id."); + } + } + + @Configuration(proxyBeanMethods = false) + @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri") + protected static class OpaqueTokenIntrospectorConfig { + @Bean + protected OpaqueTokenIntrospector opaqueTokenIntrospector(OAuth2ResourceServerProperties oAuth2ResourceServerProperties, + AuthoritiesMapper authoritiesMapper) { + return new CustomAuthoritiesOpaqueTokenIntrospector( + oAuth2ResourceServerProperties.getOpaquetoken().getIntrospectionUri(), + oAuth2ResourceServerProperties.getOpaquetoken().getClientId(), + oAuth2ResourceServerProperties.getOpaquetoken().getClientSecret(), + authoritiesMapper); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class OidcUserServiceConfig { + @Bean + protected OAuth2UserService oidcUserService(AuthoritiesMapper authoritiesMapper) { + return new CustomOAuth2OidcUserService(authoritiesMapper); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class PlainOauth2UserServiceConfig { + @Bean + protected OAuth2UserService plainOauth2UserService(AuthoritiesMapper authoritiesMapper) { + return new CustomPlainOAuth2UserService(authoritiesMapper); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class OAuth2AuthorizedClientManagerConfig { + @Bean + protected OAuth2AuthorizedClientManager authorizedClientManager( + ClientRegistrationRepository clientRegistrationRepository, + OAuth2AuthorizedClientRepository authorizedClientRepository) { + OAuth2AuthorizedClientProvider authorizedClientProvider = + OAuth2AuthorizedClientProviderBuilder.builder() + .authorizationCode() + .refreshToken() + .clientCredentials() + .password() + .build(); + DefaultOAuth2AuthorizedClientManager authorizedClientManager = new DefaultOAuth2AuthorizedClientManager( + clientRegistrationRepository, authorizedClientRepository); + authorizedClientManager.setAuthorizedClientProvider(authorizedClientProvider); + return authorizedClientManager; + } + } + + @Configuration(proxyBeanMethods = false) + protected static class WebClientConfig { + @Bean + protected WebClient webClient(OAuth2AuthorizedClientManager authorizedClientManager) { + ServletOAuth2AuthorizedClientExchangeFilterFunction oauth2Client = + new 
ServletOAuth2AuthorizedClientExchangeFilterFunction(authorizedClientManager); + oauth2Client.setDefaultOAuth2AuthorizedClient(true); + return WebClient.builder() + .apply(oauth2Client.oauth2Configuration()) + .build(); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class AuthoritiesMapperConfig { + @Bean + protected AuthoritiesMapper authorityMapper(AuthorizationProperties authorizationProperties, + OAuth2ClientProperties oAuth2ClientProperties) { + AuthoritiesMapper authorityMapper; + if (!StringUtils.hasText(authorizationProperties.getExternalAuthoritiesUrl())) { + authorityMapper = new DefaultAuthoritiesMapper( + authorizationProperties.getProviderRoleMappings(), + calculateDefaultProviderId(authorizationProperties, oAuth2ClientProperties)); + } + else { + authorityMapper = new ExternalOauth2ResourceAuthoritiesMapper( + URI.create(authorizationProperties.getExternalAuthoritiesUrl())); + } + return authorityMapper; + } + } + + @Configuration(proxyBeanMethods = false) + protected static class LogoutSuccessHandlerConfig { + @Bean + protected LogoutSuccessHandler logoutSuccessHandler(AuthorizationProperties authorizationProperties, + OAuth2TokenUtilsService oauth2TokenUtilsService) { + AccessTokenClearingLogoutSuccessHandler logoutSuccessHandler = + new AccessTokenClearingLogoutSuccessHandler(oauth2TokenUtilsService); + logoutSuccessHandler.setDefaultTargetUrl(dashboard(authorizationProperties, "/logout-success-oauth.html")); + return logoutSuccessHandler; + } + } + + @Configuration(proxyBeanMethods = false) + @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri") + protected static class AuthenticationProviderConfig { + + protected OpaqueTokenIntrospector opaqueTokenIntrospector; + + @Autowired(required = false) + public void setOpaqueTokenIntrospector(OpaqueTokenIntrospector opaqueTokenIntrospector) { + this.opaqueTokenIntrospector = opaqueTokenIntrospector; + } + + @Bean + protected AuthenticationProvider authenticationProvider( + OAuth2AccessTokenResponseClient oAuth2PasswordTokenResponseClient, + ClientRegistrationRepository clientRegistrationRepository, + AuthorizationProperties authorizationProperties, + OAuth2ClientProperties oauth2ClientProperties) { + return new ManualOAuthAuthenticationProvider( + oAuth2PasswordTokenResponseClient, + clientRegistrationRepository, + this.opaqueTokenIntrospector, + calculateDefaultProviderId(authorizationProperties, oauth2ClientProperties)); + + } + } + + @Configuration(proxyBeanMethods = false) + @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri") + protected static class ProviderManagerConfig { + private AuthenticationProvider authenticationProvider; + + protected AuthenticationProvider getAuthenticationProvider() { + return authenticationProvider; + } + + @Autowired(required = false) + protected void setAuthenticationProvider(AuthenticationProvider authenticationProvider) { + this.authenticationProvider = authenticationProvider; + } + + @Bean + protected ProviderManager providerManager() { + List providers = new ArrayList<>(); + providers.add(authenticationProvider); + return new ProviderManager(providers); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class OAuth2TokenUtilsServiceConfig { + @Bean + protected OAuth2TokenUtilsService oauth2TokenUtilsService(OAuth2AuthorizedClientService oauth2AuthorizedClientService) { + return new 
DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class OAuth2AuthenticationFailureEventConfig { + @EventListener + public void handleOAuth2AuthenticationFailureEvent( + AbstractAuthenticationFailureEvent authenticationFailureEvent) { + logger.warn("An authentication failure event occurred while accessing a REST resource that requires authentication.", + authenticationFailureEvent.getException()); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class OAuth2AccessTokenResponseClientConfig { + @Bean + OAuth2AccessTokenResponseClient oAuth2PasswordTokenResponseClient() { + return new DefaultPasswordTokenResponseClient(); + } + } + + protected static class BrowserDetectingContentNegotiationStrategy extends HeaderContentNegotiationStrategy { + @Override + public List resolveMediaTypes(NativeWebRequest request) throws HttpMediaTypeNotAcceptableException { + final List supportedMediaTypes = super.resolveMediaTypes(request); + final String userAgent = request.getHeader(HttpHeaders.USER_AGENT); + if (userAgent != null && userAgent.contains("Mozilla/5.0") + && !supportedMediaTypes.contains(MediaType.APPLICATION_JSON)) { + return Collections.singletonList(MediaType.TEXT_HTML); + } + return Collections.singletonList(MediaType.APPLICATION_JSON); + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java new file mode 100644 index 0000000000..fe679e6bc5 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java @@ -0,0 +1,264 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.cloud.common.security.support.CoreSecurityRoles; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * Holds configuration for the authorization aspects of security. 
+ * + * @author Gunnar Hillert + * + */ +public class ProviderRoleMapping { + + private String oauthScopePrefix = "dataflow."; + private String rolePrefix = "ROLE_"; + private String groupClaim = "roles"; + private boolean mapOauthScopes = false; + private boolean parseOauthScopePathParts = true; + private boolean mapGroupClaims = false; + private Map roleMappings = new HashMap<>(0); + private Map groupMappings = new HashMap<>(0); + private String principalClaimName; + + public ProviderRoleMapping() { + super(); + } + + public ProviderRoleMapping(boolean mapOauthScopes) { + this.mapOauthScopes = mapOauthScopes; + } + + public ProviderRoleMapping(boolean mapOauthScopes, Map roleMappings) { + Assert.notNull(roleMappings, "roleMappings must not be null."); + this.mapOauthScopes = mapOauthScopes; + this.roleMappings = roleMappings; + } + + public boolean isParseOauthScopePathParts() { + return parseOauthScopePathParts; + } + + /** + * Sets whether or not to treat OAuth scopes as URIs during the role mapping. + * When set to {@code true} the OAuth scope will be treated as a URI and the leading part will be ignored (eg. 'api://dataflow-server/dataflow.create' will result in 'dataflow.create'). + * When set to {@code false} the OAuth scope will be used as-is. This is useful in cases where the scope is not a URI and contains '/' leading characters. + * + * @param parseOauthScopePathParts whether or not to treat OAuth scopes as URIs during the role mapping + */ + public void setParseOauthScopePathParts(boolean parseOauthScopePathParts) { + this.parseOauthScopePathParts = parseOauthScopePathParts; + } + + public boolean isMapOauthScopes() { + return mapOauthScopes; + } + + /** + * If set to true, Oauth scopes will be mapped to corresponding Data Flow roles. + * Otherwise, if set to false, or not set at all, all roles will be assigned to users. + * + * @param mapOauthScopes If not set defaults to false + */ + public void setMapOauthScopes(boolean mapOauthScopes) { + this.mapOauthScopes = mapOauthScopes; + } + + public boolean isMapGroupClaims() { + return mapGroupClaims; + } + + public void setMapGroupClaims(boolean mapGroupClaims) { + this.mapGroupClaims = mapGroupClaims; + } + + /** + * When using OAuth2 with enabled {@link #setMapOauthScopes(boolean)}, you can optionally specify a custom + * mapping of OAuth scopes to role names as they exist in the Data Flow application. If not + * set, then the OAuth scopes themselves must match the role names: + * + *
    + *
+ * <ul>
+ *   <li>MANAGE = dataflow.manage</li>
+ *   <li>VIEW = dataflow.view</li>
+ *   <li>CREATE = dataflow.create</li>
+ * </ul>
+ *
+ * @return Optional (May be null). Returns a map of scope-to-role mappings.
+ */
+	public Map<String, String> getRoleMappings() {
+		return roleMappings;
+	}
+
+	public ProviderRoleMapping addRoleMapping(String oauthScope, String roleName) {
+		this.roleMappings.put(oauthScope, roleName);
+		return this;
+	}
+
+	public Map<String, String> getGroupMappings() {
+		return groupMappings;
+	}
+
+	public void setGroupMappings(Map<String, String> groupMappings) {
+		this.groupMappings = groupMappings;
+	}
+
+	public String getGroupClaim() {
+		return groupClaim;
+	}
+
+	public void setGroupClaim(String groupClaim) {
+		this.groupClaim = groupClaim;
+	}
+
+	public String getPrincipalClaimName() {
+		return principalClaimName;
+	}
+
+	public void setPrincipalClaimName(String principalClaimName) {
+		this.principalClaimName = principalClaimName;
+	}
+
+	public Map<CoreSecurityRoles, String> convertGroupMappingKeysToCoreSecurityRoles() {
+
+		final Map<CoreSecurityRoles, String> groupMappings = new HashMap<>(0);
+
+		if (CollectionUtils.isEmpty(this.groupMappings)) {
+			for (CoreSecurityRoles roleEnum : CoreSecurityRoles.values()) {
+				final String roleName = this.oauthScopePrefix + roleEnum.getKey();
+				groupMappings.put(roleEnum, roleName);
+			}
+			return groupMappings;
+		}
+
+		final List<CoreSecurityRoles> unmappedRoles = new ArrayList<>(0);
+
+		for (CoreSecurityRoles coreRole : CoreSecurityRoles.values()) {
+
+			final String coreSecurityRoleName;
+			if (this.rolePrefix.length() > 0 && !coreRole.getKey().startsWith(rolePrefix)) {
+				coreSecurityRoleName = rolePrefix + coreRole.getKey();
+			}
+			else {
+				coreSecurityRoleName = coreRole.getKey();
+			}
+
+			final String oauthScope = this.groupMappings.get(coreSecurityRoleName);
+
+			if (oauthScope == null) {
+				unmappedRoles.add(coreRole);
+			}
+			else {
+				groupMappings.put(coreRole, oauthScope);
+			}
+		}
+
+		if (!unmappedRoles.isEmpty()) {
+			throw new IllegalArgumentException(
+					String.format("The following %s %s not mapped: %s.",
+							unmappedRoles.size(),
+							unmappedRoles.size() > 1 ? "roles are" : "role is",
+							StringUtils.collectionToDelimitedString(unmappedRoles, ", ")));
+		}
+
+		return groupMappings;
+	}
+
+	/**
+	 * @return Map containing the {@link CoreSecurityRoles} as key and the associated role name (String) as value.
+	 */
+	public Map<CoreSecurityRoles, String> convertRoleMappingKeysToCoreSecurityRoles() {
+
+		final Map<CoreSecurityRoles, String> roleMappings = new HashMap<>(0);
+
+		if (CollectionUtils.isEmpty(this.roleMappings)) {
+			for (CoreSecurityRoles roleEnum : CoreSecurityRoles.values()) {
+				final String roleName = this.oauthScopePrefix + roleEnum.getKey();
+				roleMappings.put(roleEnum, roleName);
+			}
+			return roleMappings;
+		}
+
+		final List<CoreSecurityRoles> unmappedRoles = new ArrayList<>(0);
+
+		for (CoreSecurityRoles coreRole : CoreSecurityRoles.values()) {
+
+			final String coreSecurityRoleName;
+			if (this.rolePrefix.length() > 0 && !coreRole.getKey().startsWith(rolePrefix)) {
+				coreSecurityRoleName = rolePrefix + coreRole.getKey();
+			}
+			else {
+				coreSecurityRoleName = coreRole.getKey();
+			}
+
+			final String oauthScope = this.roleMappings.get(coreSecurityRoleName);
+
+			if (oauthScope == null) {
+				unmappedRoles.add(coreRole);
+			}
+			else {
+				roleMappings.put(coreRole, oauthScope);
+			}
+		}
+
+		if (!unmappedRoles.isEmpty()) {
+			throw new IllegalArgumentException(
+					String.format("The following %s %s not mapped: %s.",
+							unmappedRoles.size(),
+							unmappedRoles.size() > 1 ? "roles are" : "role is",
+							StringUtils.collectionToDelimitedString(unmappedRoles, ", ")));
+		}
+
+		return roleMappings;
+	}
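A sketch of how these conversion methods are fed: every CoreSecurityRoles entry must be mapped, otherwise the methods above throw an IllegalArgumentException. Note that the parameter names of addRoleMapping suggest the reverse order; as read by the convert methods, the map keys are the prefixed role names and the values are the expected OAuth scopes. Scope names below are assumed:

    ProviderRoleMapping mapping = new ProviderRoleMapping(true);
    for (CoreSecurityRoles role : CoreSecurityRoles.values()) {
        // key: prefixed role name, value: expected OAuth scope
        mapping.addRoleMapping("ROLE_" + role.getKey(), "dataflow." + role.getKey().toLowerCase());
    }
    Map<CoreSecurityRoles, String> resolved = mapping.convertRoleMappingKeysToCoreSecurityRoles();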
+	/**
+	 * Sets the prefix which should be added to the authority name (if it doesn't already
+	 * exist).
+	 *
+	 * @param rolePrefix Must not be null
+	 *
+	 */
+	public void setRolePrefix(String rolePrefix) {
+		Assert.notNull(rolePrefix, "rolePrefix cannot be null");
+		this.rolePrefix = rolePrefix;
+	}
+
+	public String getOauthScopePrefix() {
+		return oauthScopePrefix;
+	}
+
+	/**
+	 *
+	 * @param oauthScopePrefix Must not be null
+	 */
+	public void setOauthScopePrefix(String oauthScopePrefix) {
+		Assert.notNull(oauthScopePrefix, "oauthScopePrefix cannot be null");
+		this.oauthScopePrefix = oauthScopePrefix;
+	}
+
+	public String getRolePrefix() {
+		return rolePrefix;
+	}
+}
diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java
similarity index 83%
rename from spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java
rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java
index 1260a504bf..458e8c5a6e 100644
--- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java
@@ -13,13 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-package com.acme.boot13;
-
 /**
- * An enum used in configuration properties class.
+ * Contains security related configuration classes.
  */
-public enum AnotherEnumClass13 {
-	low,
-	high;
-}
+package org.springframework.cloud.common.security;
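Before the next file: a sketch of how the logout handler defined below is typically registered, mirroring LogoutSuccessHandlerConfig from OAuthSecurityConfiguration earlier in this patch. The target URL and the oauth2TokenUtilsService bean are assumed:

    AccessTokenClearingLogoutSuccessHandler handler =
            new AccessTokenClearingLogoutSuccessHandler(oauth2TokenUtilsService);
    handler.setDefaultTargetUrl("/dashboard/logout-success-oauth.html");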
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java
new file mode 100644
index 0000000000..409e6ea3e8
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService;
+import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
+import org.springframework.security.web.authentication.logout.SimpleUrlLogoutSuccessHandler;
+import org.springframework.util.Assert;
+
+/**
+ * Customized {@link SimpleUrlLogoutSuccessHandler} that will remove the previously authenticated user's
+ * {@link OAuth2AuthorizedClient} from the underlying {@link OAuth2AuthorizedClientService}.
+ *
+ * @author Gunnar Hillert
+ * @since 1.3.0
+ */
+public class AccessTokenClearingLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler {
+
+	private static final Logger logger = LoggerFactory.getLogger(AccessTokenClearingLogoutSuccessHandler.class);
+
+	final OAuth2TokenUtilsService oauth2TokenUtilsService;
+
+	public AccessTokenClearingLogoutSuccessHandler(OAuth2TokenUtilsService oauth2TokenUtilsService) {
+		Assert.notNull(oauth2TokenUtilsService, "oauth2TokenUtilsService must not be null.");
+		this.oauth2TokenUtilsService = oauth2TokenUtilsService;
+	}
+
+	@Override
+	public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response,
+			Authentication authentication) throws IOException, ServletException {
+
+		if (authentication instanceof OAuth2AuthenticationToken) {
+			final OAuth2AuthenticationToken oauth2AuthenticationToken = (OAuth2AuthenticationToken) authentication;
+			final OAuth2AuthorizedClient oauth2AuthorizedClient = oauth2TokenUtilsService.getAuthorizedClient(oauth2AuthenticationToken);
+			oauth2TokenUtilsService.removeAuthorizedClient(oauth2AuthorizedClient);
+			logger.info("Removed OAuth2AuthorizedClient.");
+		}
+
+		super.handle(request, response, authentication);
+	}
+
+}
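The next file defines the AuthoritiesMapper contract. Because it has a single abstract method, a trivial implementation can be written as a lambda; a sketch for local experiments only, not part of this patch:

    AuthoritiesMapper allViewers = (providerId, scopes, token) ->
            Collections.singleton(new SimpleGrantedAuthority("ROLE_VIEW"));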
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java
new file mode 100644
index 0000000000..70e8be71a3
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.springframework.security.core.GrantedAuthority;
+
+/**
+ * Maps scopes and claims into authorities.
+ *
+ * @author Gunnar Hillert
+ * @author Janne Valkealahti
+ */
+public interface AuthoritiesMapper {
+
+	/**
+	 * Map the provided scopes to authorities.
+	 *
+	 * @param providerId If null, then the default providerId is used
+	 * @param scopes the scopes to map
+	 * @param token some implementation may need to make additional requests
+	 * @return the mapped authorities
+	 */
+	Set<GrantedAuthority> mapScopesToAuthorities(String providerId, Set<String> scopes, String token);
+
+	/**
+	 * Map the provided claims to authorities.
+	 *
+	 * @param providerId If null, then the default providerId is used
+	 * @param claims the claims to map
+	 * @return the mapped authorities
+	 */
+	default Set<GrantedAuthority> mapClaimsToAuthorities(String providerId, List<String> claims) {
+		return Collections.emptySet();
+	}
+}
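A short sketch of the enum that follows, resolving role keys coming from configuration (both methods are defined in the next file):

    CoreSecurityRoles role = CoreSecurityRoles.fromKey("VIEW");  // -> CoreSecurityRoles.VIEW
    String[] all = CoreSecurityRoles.getAllRolesAsStringArray(); // {"CREATE", "DEPLOY", ...}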
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java
new file mode 100644
index 0000000000..c8a3a77206
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2017-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Arrays;
+
+import org.springframework.util.Assert;
+
+/**
+ * Defines the core security roles supported by Spring Cloud Security.
+ *
+ * @author Gunnar Hillert
+ */
+public enum CoreSecurityRoles {
+
+	CREATE("CREATE", "role for create operations"),
+	DEPLOY("DEPLOY", "role for deploy operations"),
+	DESTROY("DESTROY", "role for destroy operations"),
+	MANAGE("MANAGE", "role for the boot management endpoints"),
+	MODIFY("MODIFY", "role for modify operations"),
+	SCHEDULE("SCHEDULE", "role for scheduling operations"),
+	VIEW("VIEW", "view role");
+
+	private String key;
+
+	private String name;
+
+	CoreSecurityRoles(final String key, final String name) {
+		this.key = key;
+		this.name = name;
+	}
+
+	public static CoreSecurityRoles fromKey(String role) {
+
+		Assert.hasText(role, "Parameter role must not be null or empty.");
+
+		for (CoreSecurityRoles roleType : CoreSecurityRoles.values()) {
+			if (roleType.getKey().equals(role)) {
+				return roleType;
+			}
+		}
+
+		return null;
+	}
+
+	/**
+	 * Helper method that will return all role names as a string array.
+	 *
+	 * @return Never null
+	 */
+	public static String[] getAllRolesAsStringArray() {
+		return Arrays.stream(CoreSecurityRoles.values()).map(CoreSecurityRoles::getKey)
+				.toArray(size -> new String[size]);
+	}
+
+	public String getKey() {
+		return key;
+	}
+
+	public String getName() {
+		return name;
+	}
+
+}
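A sketch of standalone use of the introspector defined in the next file; the endpoint, client id, secret, and the authoritiesMapper bean are assumed values:

    CustomAuthoritiesOpaqueTokenIntrospector introspector =
            new CustomAuthoritiesOpaqueTokenIntrospector(
                    "https://uaa.example.com/introspect", "dataflow", "secret", authoritiesMapper);
    OAuth2AuthenticatedPrincipal principal = introspector.introspect(accessTokenValue);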
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java
new file mode 100644
index 0000000000..2d914ee09e
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.oauth2.core.DefaultOAuth2AuthenticatedPrincipal;
+import org.springframework.security.oauth2.core.OAuth2AuthenticatedPrincipal;
+import org.springframework.security.oauth2.server.resource.introspection.NimbusOpaqueTokenIntrospector;
+import org.springframework.security.oauth2.server.resource.introspection.OAuth2IntrospectionClaimNames;
+import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector;
+
+/**
+ *
+ * @author Gunnar Hillert
+ * @since 1.3.0
+ */
+public class CustomAuthoritiesOpaqueTokenIntrospector implements OpaqueTokenIntrospector {
+
+	private static final Logger logger = LoggerFactory.getLogger(CustomAuthoritiesOpaqueTokenIntrospector.class);
+	private final OpaqueTokenIntrospector delegate;
+	private DefaultPrincipalExtractor principalExtractor;
+	private AuthoritiesMapper authorityMapper;
+
+	public CustomAuthoritiesOpaqueTokenIntrospector(
+			String introspectionUri,
+			String clientId,
+			String clientSecret,
+			AuthoritiesMapper authorityMapper) {
+		this.delegate = new NimbusOpaqueTokenIntrospector(introspectionUri, clientId, clientSecret);
+		this.principalExtractor = new DefaultPrincipalExtractor();
+		this.authorityMapper = authorityMapper;
+	}
+
+	@Override
+	public OAuth2AuthenticatedPrincipal introspect(String token) {
+		logger.debug("Introspecting");
+		OAuth2AuthenticatedPrincipal principal = this.delegate.introspect(token);
+		Object principalName = principalExtractor.extractPrincipal(principal.getAttributes());
+		return new DefaultOAuth2AuthenticatedPrincipal(
+				principalName.toString(), principal.getAttributes(), extractAuthorities(principal, token));
+	}
+
+	private Collection<GrantedAuthority> extractAuthorities(OAuth2AuthenticatedPrincipal principal, String token) {
+		final List<String> scopes = principal.getAttribute(OAuth2IntrospectionClaimNames.SCOPE);
+		final Set<String> scopesAsSet = new HashSet<>(scopes);
+		final Set<GrantedAuthority> authorities = this.authorityMapper.mapScopesToAuthorities(null, scopesAsSet, token);
+		final Set<GrantedAuthority> authorities2 = this.authorityMapper.mapClaimsToAuthorities(null, Arrays.asList("groups", "roles"));
+		authorities.addAll(authorities2);
+		return authorities;
+	}
+
+	public void setPrincipalExtractor(DefaultPrincipalExtractor principalExtractor) {
+		this.principalExtractor = principalExtractor;
+	}
+
+	public void setAuthorityMapper(AuthoritiesMapper authorityMapper) {
+		this.authorityMapper = authorityMapper;
+	}
+
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java
new file mode 100644
index 0000000000..7ba93044f1
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserService; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.OAuth2AuthenticationException; +import org.springframework.security.oauth2.core.oidc.user.DefaultOidcUser; +import org.springframework.security.oauth2.core.oidc.user.OidcUser; +import org.springframework.util.StringUtils; + +/** + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public class CustomOAuth2OidcUserService implements OAuth2UserService { + + private final static Logger log = LoggerFactory.getLogger(CustomOAuth2OidcUserService.class); + final OidcUserService delegate = new OidcUserService(); + final AuthoritiesMapper authorityMapper; + + public CustomOAuth2OidcUserService(AuthoritiesMapper authorityMapper) { + this.authorityMapper = authorityMapper; + } + + @Override + public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException { + log.debug("Load user"); + final OidcUser oidcUser = delegate.loadUser(userRequest); + final OAuth2AccessToken accessToken = userRequest.getAccessToken(); + final Set mappedAuthorities1 = this.authorityMapper.mapScopesToAuthorities( + userRequest.getClientRegistration().getRegistrationId(), accessToken.getScopes(), + accessToken.getTokenValue()); + + List roleClaims = oidcUser.getClaimAsStringList("groups"); + if (roleClaims == null) { + roleClaims = oidcUser.getClaimAsStringList("roles"); + } + if (roleClaims == null) { + roleClaims = new ArrayList<>(); + } + log.debug("roleClaims: {}", roleClaims); + Set mappedAuthorities2 = this.authorityMapper + .mapClaimsToAuthorities(userRequest.getClientRegistration().getRegistrationId(), roleClaims); + + final String userNameAttributeName = userRequest.getClientRegistration() + .getProviderDetails().getUserInfoEndpoint().getUserNameAttributeName(); + + log.debug("AccessToken: {}", accessToken.getTokenValue()); + + HashSet mappedAuthorities = new HashSet<>(mappedAuthorities1); + mappedAuthorities.addAll(mappedAuthorities2); + + final OidcUser oidcUserToReturn; + // OidcUser oidcUserToReturn; + + if (StringUtils.hasText(userNameAttributeName)) { + oidcUserToReturn = new DefaultOidcUser(mappedAuthorities, userRequest.getIdToken(), oidcUser.getUserInfo(), + userNameAttributeName); + } else { + oidcUserToReturn = new DefaultOidcUser(mappedAuthorities, userRequest.getIdToken(), oidcUser.getUserInfo()); + } + return oidcUserToReturn; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java new file mode 100644 index 0000000000..249f6d6688 --- /dev/null +++ 
b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.oauth2.client.userinfo.DefaultOAuth2UserService; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.OAuth2AuthenticationException; +import org.springframework.security.oauth2.core.user.DefaultOAuth2User; +import org.springframework.security.oauth2.core.user.OAuth2User; + +/** + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public class CustomPlainOAuth2UserService implements OAuth2UserService { + + private final static Logger log = LoggerFactory.getLogger(CustomPlainOAuth2UserService.class); + final DefaultOAuth2UserService delegate = new DefaultOAuth2UserService(); + final AuthoritiesMapper authorityMapper; + + public CustomPlainOAuth2UserService(AuthoritiesMapper authorityMapper) { + this.authorityMapper = authorityMapper; + } + + @Override + public OAuth2User loadUser(OAuth2UserRequest userRequest) throws OAuth2AuthenticationException { + log.debug("Load user"); + final OAuth2User oauth2User = delegate.loadUser(userRequest); + final OAuth2AccessToken accessToken = userRequest.getAccessToken(); + log.debug("AccessToken: {}", accessToken.getTokenValue()); + + final Set mappedAuthorities = this.authorityMapper.mapScopesToAuthorities( + userRequest.getClientRegistration().getRegistrationId(), accessToken.getScopes(), + accessToken.getTokenValue()); + final String userNameAttributeName = userRequest.getClientRegistration() + .getProviderDetails().getUserInfoEndpoint().getUserNameAttributeName(); + final OAuth2User oauth2UserToReturn = new DefaultOAuth2User(mappedAuthorities, oauth2User.getAttributes(), + userNameAttributeName); + return oauth2UserToReturn; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java new file mode 100644 index 0000000000..b5e9dc82e4 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java @@ -0,0 +1,233 @@ +/* + * Copyright 2019-2021 the original 
author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.common.security.ProviderRoleMapping; +import org.springframework.security.config.core.GrantedAuthorityDefaults; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Default {@link AuthoritiesMapper}. + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public class DefaultAuthoritiesMapper implements AuthoritiesMapper { + + private static final Logger logger = LoggerFactory.getLogger(DefaultAuthoritiesMapper.class); + private final Map providerRoleMappings; + private final String defaultProviderId; + + public DefaultAuthoritiesMapper(Map providerRoleMappings, String defaultProviderId) { + super(); + + Assert.notNull(providerRoleMappings, "providerRoleMappings must not be null."); + for (Entry providerRoleMappingToValidate : providerRoleMappings.entrySet()) { + providerRoleMappingToValidate.getValue().convertRoleMappingKeysToCoreSecurityRoles(); + } + + this.providerRoleMappings = providerRoleMappings; + this.defaultProviderId = defaultProviderId; + } + + /** + * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a + * single {@link ProviderRoleMapping}. + * + * @param providerId Create a ProviderRoleMapping with the specified providerId + * @param mapOAuthScopes Shall OAuth scopes be considered? + * @param roleMappings Used to populate the ProviderRoleMapping + */ + public DefaultAuthoritiesMapper(String providerId, boolean mapOAuthScopes, Map roleMappings) { + Assert.hasText(providerId, "The providerId must not be null or empty."); + final ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(mapOAuthScopes, roleMappings); + this.providerRoleMappings = new HashMap(1); + this.providerRoleMappings.put(providerId, providerRoleMapping); + for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) { + providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles(); + } + this.defaultProviderId = providerId; + } + + /** + * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a + * single {@link ProviderRoleMapping}. + * + * @param providerId The provider id for the ProviderRoleMapping + * @param mapOAuthScopes Consider scopes? 
+ */ + public DefaultAuthoritiesMapper(String providerId, boolean mapOAuthScopes) { + Assert.hasText(providerId, "The providerId must not be null or empty."); + final ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(mapOAuthScopes); + this.providerRoleMappings = new HashMap<>(1); + this.providerRoleMappings.put(providerId, providerRoleMapping); + for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) { + providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles(); + } + this.defaultProviderId = providerId; + } + + /** + * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a + * single {@link ProviderRoleMapping}. + * + * @param providerId The provider id for the ProviderRoleMapping + * @param providerRoleMapping The role mappings to add to the {@link ProviderRoleMapping} + */ + public DefaultAuthoritiesMapper(String providerId, ProviderRoleMapping providerRoleMapping) { + this.providerRoleMappings = new HashMap<>(1); + this.providerRoleMappings.put(providerId, providerRoleMapping); + for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) { + providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles(); + } + this.defaultProviderId = providerId; + } + + /** + * The returned {@link Set} of {@link GrantedAuthority}s contains all roles from + * {@link CoreSecurityRoles}. The roles are prefixed with the value specified in + * {@link GrantedAuthorityDefaults}. + * + * @param clientIdParam If null, the default defaultProviderId is used + * @param scopes Must not be null + * @param token Ignored in this implementation + */ + @Override + public Set<GrantedAuthority> mapScopesToAuthorities(String clientIdParam, Set<String> scopes, String token) { + logger.debug("Mapping scopes to authorities"); + final String clientId; + if (clientIdParam == null) { + clientId = this.defaultProviderId; + } + else { + clientId = clientIdParam; + } + Assert.notNull(scopes, "The scopes argument must not be null."); + + final ProviderRoleMapping roleMapping = this.providerRoleMappings.get(clientId); + + if (roleMapping == null) { + throw new IllegalArgumentException("No role mapping found for clientId " + clientId); + } + + final List<String> rolesAsStrings = new ArrayList<>(); + + Set<GrantedAuthority> grantedAuthorities = new HashSet<>(); + + if (roleMapping.isMapOauthScopes()) { + if (!scopes.isEmpty()) { + for (Map.Entry<CoreSecurityRoles, String> roleMappingEntry : roleMapping.convertRoleMappingKeysToCoreSecurityRoles().entrySet()) { + final CoreSecurityRoles role = roleMappingEntry.getKey(); + final String expectedOAuthScope = roleMappingEntry.getValue(); + Set<String> scopeList = roleMapping.isParseOauthScopePathParts() ?
pathParts(scopes) : scopes; + for (String scope : scopeList) { + if (scope.equalsIgnoreCase(expectedOAuthScope)) { + final SimpleGrantedAuthority oauthRoleAuthority = new SimpleGrantedAuthority(roleMapping.getRolePrefix() + role.getKey()); + rolesAsStrings.add(oauthRoleAuthority.getAuthority()); + grantedAuthorities.add(oauthRoleAuthority); + } + } + } + logger.info("Adding roles: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); + } + } + else if (!roleMapping.isMapGroupClaims()) { + grantedAuthorities = + roleMapping.convertRoleMappingKeysToCoreSecurityRoles().entrySet().stream().map(mapEntry -> { + final CoreSecurityRoles role = mapEntry.getKey(); + rolesAsStrings.add(role.getKey()); + return new SimpleGrantedAuthority(roleMapping.getRolePrefix() + mapEntry.getKey()); + }).collect(Collectors.toSet()); + logger.info("Adding ALL roles: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); + } + return grantedAuthorities; + } + + @Override + public Set<GrantedAuthority> mapClaimsToAuthorities(String clientIdParam, List<String> claims) { + logger.debug("Mapping claims to authorities"); + final String clientId; + if (clientIdParam == null) { + clientId = this.defaultProviderId; + } + else { + clientId = clientIdParam; + } + + final ProviderRoleMapping groupMapping = this.providerRoleMappings.get(clientId); + if (groupMapping == null) { + throw new IllegalArgumentException("No role mapping found for clientId " + clientId); + } + + final List<String> rolesAsStrings = new ArrayList<>(); + final Set<GrantedAuthority> grantedAuthorities = new HashSet<>(); + + if (groupMapping.isMapGroupClaims()) { + if (!claims.isEmpty()) { + for (Map.Entry<CoreSecurityRoles, String> roleMappingEntry : groupMapping.convertGroupMappingKeysToCoreSecurityRoles().entrySet()) { + final CoreSecurityRoles role = roleMappingEntry.getKey(); + final String expectedOAuthScope = roleMappingEntry.getValue(); + logger.debug("Checking group mapping {} {}", role, expectedOAuthScope); + for (String claim : claims) { + logger.debug("Checking against claim {} {}", claim, expectedOAuthScope); + if (claim.equalsIgnoreCase(expectedOAuthScope)) { + final SimpleGrantedAuthority oauthRoleAuthority = new SimpleGrantedAuthority(groupMapping.getRolePrefix() + role.getKey()); + rolesAsStrings.add(oauthRoleAuthority.getAuthority()); + grantedAuthorities.add(oauthRoleAuthority); + logger.debug("Adding to granted authorities {}", oauthRoleAuthority); + } + } + } + logger.info("Adding groups: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); + } + } + + return grantedAuthorities; + } + + private Set<String> pathParts(Set<String> scopes) { + // Strip away the leading part if the scope is a URI such as + // api://dataflow-server/dataflow.create, resulting in dataflow.create + return scopes.stream().map(scope -> { + try { + URI uri = URI.create(scope); + String path = uri.getPath(); + if (StringUtils.hasText(path) && path.charAt(0) == '/') { + return path.substring(1); + } + } catch (Exception e) { + // not a parseable URI; fall through and return the raw scope + } + return scope; + }) + .collect(Collectors.toSet()); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java new file mode 100644 index 0000000000..063c6b7917 --- /dev/null +++ 
b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java @@ -0,0 +1,110 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; +import org.springframework.security.oauth2.core.AbstractOAuth2Token; +import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthentication; +import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationToken; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Utility methods for retrieving access tokens. + * + * @author Gunnar Hillert + */ +public class DefaultOAuth2TokenUtilsService implements OAuth2TokenUtilsService { + + private final OAuth2AuthorizedClientService oauth2AuthorizedClientService; + + public DefaultOAuth2TokenUtilsService(OAuth2AuthorizedClientService oauth2AuthorizedClientService) { + Assert.notNull(oauth2AuthorizedClientService, "oauth2AuthorizedClientService must not be null."); + this.oauth2AuthorizedClientService = oauth2AuthorizedClientService; + } + + /** + * Retrieves the access token from the {@link Authentication} implementation. + * + * @return The access token value of the authenticated user; never null. An + * {@link IllegalStateException} is thrown if the token cannot be retrieved. + */ + @Override + public String getAccessTokenOfAuthenticatedUser() { + + final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); + + if (authentication == null) { + throw new IllegalStateException("Cannot retrieve the authentication object from the SecurityContext. 
Are you authenticated?"); + } + + final String accessTokenOfAuthenticatedUser; + + if (authentication instanceof BearerTokenAuthentication) { + accessTokenOfAuthenticatedUser = ((BearerTokenAuthentication) authentication).getToken().getTokenValue(); + } + else if (authentication instanceof OAuth2AuthenticationToken) { + final OAuth2AuthenticationToken oauth2AuthenticationToken = (OAuth2AuthenticationToken) authentication; + final OAuth2AuthorizedClient oauth2AuthorizedClient = this.getAuthorizedClient(oauth2AuthenticationToken); + accessTokenOfAuthenticatedUser = oauth2AuthorizedClient.getAccessToken().getTokenValue(); + } + else if (authentication instanceof JwtAuthenticationToken) { + AbstractOAuth2Token token = (AbstractOAuth2Token) authentication.getCredentials(); + accessTokenOfAuthenticatedUser = token.getTokenValue(); + } + else { + throw new IllegalStateException("Unsupported authentication object type " + authentication); + } + + return accessTokenOfAuthenticatedUser; + } + + @Override + public OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken auth2AuthenticationToken) { + + final String principalName = auth2AuthenticationToken.getName(); + final String clientRegistrationId = auth2AuthenticationToken.getAuthorizedClientRegistrationId(); + + if (!StringUtils.hasText(principalName)) { + throw new IllegalStateException("The retrieved principalName must not be null or empty."); + } + + if (!StringUtils.hasText(clientRegistrationId)) { + throw new IllegalStateException("The retrieved clientRegistrationId must not be null or empty."); + } + + final OAuth2AuthorizedClient oauth2AuthorizedClient = this.oauth2AuthorizedClientService.loadAuthorizedClient(clientRegistrationId, principalName); + + if (oauth2AuthorizedClient == null) { + throw new IllegalStateException(String.format( + "No oauth2AuthorizedClient returned for clientRegistrationId '%s' and principalName '%s'.", + clientRegistrationId, principalName)); + } + return oauth2AuthorizedClient; + } + + @Override + public void removeAuthorizedClient(OAuth2AuthorizedClient auth2AuthorizedClient) { + Assert.notNull(auth2AuthorizedClient, "The auth2AuthorizedClient must not be null."); + this.oauth2AuthorizedClientService.removeAuthorizedClient( + auth2AuthorizedClient.getClientRegistration().getRegistrationId(), + auth2AuthorizedClient.getPrincipalName()); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java new file mode 100644 index 0000000000..a8d5254993 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Map; + +/** + * The default implementation of the {@link PrincipalExtractor} that extracts the username + * of the principal. + * + * @author Gunnar Hillert + * + */ +public class DefaultPrincipalExtractor implements PrincipalExtractor { + + private static final String[] PRINCIPAL_KEYS = new String[] { "user_name", "user", "username", + "userid", "user_id", "login", "id", "name", "cid", "client_id" }; + + @Override + public Object extractPrincipal(Map<String, Object> map) { + for (String key : PRINCIPAL_KEYS) { + if (map.containsKey(key)) { + return map.get(key); + } + } + return null; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java new file mode 100644 index 0000000000..de7270da44 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java @@ -0,0 +1,131 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.net.URI; +import java.util.HashSet; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.ResponseEntity; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.client.http.OAuth2ErrorResponseErrorHandler; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; +import org.springframework.web.client.RestOperations; +import org.springframework.web.client.RestTemplate; + +/** + * {@link AuthoritiesMapper} that looks up + * {@link CoreSecurityRoles} from an external HTTP resource. Requests to the + * external HTTP resource are authenticated by forwarding the user's access + * token. The external resource's response body MUST be a JSON array + * containing strings with values corresponding to + * {@link CoreSecurityRoles#key} values. For example, a response containing + * {@code ["VIEW", "CREATE"]} would grant the user + * {@code ROLE_VIEW, ROLE_CREATE}. + * + * @author Mike Heath + * @author Gunnar Hillert + */ +public class ExternalOauth2ResourceAuthoritiesMapper implements AuthoritiesMapper { + + private static final Logger logger = LoggerFactory.getLogger(ExternalOauth2ResourceAuthoritiesMapper.class); + + public static final GrantedAuthority CREATE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.CREATE.getKey()); + public static final GrantedAuthority DEPLOY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.DEPLOY.getKey()); + public static final GrantedAuthority DESTROY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.DESTROY.getKey()); + public static final GrantedAuthority MANAGE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.MANAGE.getKey()); + public static final GrantedAuthority MODIFY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.MODIFY.getKey()); + public static final GrantedAuthority SCHEDULE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.SCHEDULE.getKey()); + public static final GrantedAuthority VIEW = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.VIEW.getKey()); + + private final URI roleProviderUri; + private final RestOperations restOperations; + + /** + * + * @param roleProviderUri an HTTP GET request is sent to this URI to fetch + * the user's security roles + */ + public ExternalOauth2ResourceAuthoritiesMapper( + URI roleProviderUri) { + Assert.notNull(roleProviderUri, "The provided roleProviderUri must not be null."); + this.roleProviderUri = roleProviderUri; + + final RestTemplate restTemplate = new RestTemplate(); + restTemplate.setErrorHandler(new OAuth2ErrorResponseErrorHandler()); + this.restOperations = restTemplate; + } + + + @Override + public Set<GrantedAuthority> mapScopesToAuthorities(String providerId, Set<String> scopes, String token) { + logger.debug("Getting permissions from {}", roleProviderUri); + + final HttpHeaders headers = new HttpHeaders(); + headers.add(HttpHeaders.AUTHORIZATION, OAuth2AccessToken.TokenType.BEARER.getValue() + " " + token); + + final HttpEntity<?> entity = new HttpEntity<>(null, headers); + final ResponseEntity<String[]> response = restOperations.exchange(roleProviderUri, HttpMethod.GET, entity, String[].class); + + final Set<GrantedAuthority> authorities = new HashSet<>(); + for (String permission : response.getBody()) { + if (!StringUtils.hasText(permission)) { + logger.warn("Received an empty permission from {}", roleProviderUri); + } else { + final CoreSecurityRoles securityRole = CoreSecurityRoles.fromKey(permission.toUpperCase()); + if (securityRole == null) { + logger.warn("Invalid role {} provided by {}", permission, roleProviderUri); + } else { + switch (securityRole) { + case CREATE: + authorities.add(CREATE); + break; + case DEPLOY: + authorities.add(DEPLOY); + break; + case DESTROY: + authorities.add(DESTROY); + break; + case MANAGE: + authorities.add(MANAGE); + break; + case MODIFY: + authorities.add(MODIFY); + break; + case SCHEDULE: + authorities.add(SCHEDULE); + break; + case VIEW: + authorities.add(VIEW); + break; + } + } + } + } + logger.info("Roles added for user: {}.", authorities); + return authorities; + } +} + diff --git 
a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java new file mode 100644 index 0000000000..e31c908e8a --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java @@ -0,0 +1,201 @@ +/* + * Copyright 2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.common.security.support; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.jwt.Jwt; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Extracts the {@link GrantedAuthority}s from scope attributes typically found + * in a {@link Jwt}. + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public final class MappingJwtGrantedAuthoritiesConverter implements Converter<Jwt, Collection<GrantedAuthority>> { + + private static final Logger log = LoggerFactory.getLogger(MappingJwtGrantedAuthoritiesConverter.class); + private static final String DEFAULT_AUTHORITY_PREFIX = "SCOPE_"; + + private static final Collection<String> WELL_KNOWN_SCOPES_CLAIM_NAMES = + Arrays.asList("scope", "scp"); + private static final Collection<String> WELL_KNOWN_GROUPS_CLAIM_NAMES = + Arrays.asList("groups", "roles"); + + private String authorityPrefix = DEFAULT_AUTHORITY_PREFIX; + + private String authoritiesClaimName; + private String groupAuthoritiesClaimName; + + private Map<String, String> roleAuthoritiesMapping = new HashMap<>(); + private Map<String, String> groupAuthoritiesMapping = new HashMap<>(); + + /** + * Extract {@link GrantedAuthority}s from the given {@link Jwt}.
+ * + * @param jwt The {@link Jwt} token + * @return The {@link GrantedAuthority authorities} read from the token scopes + */ + @Override + public Collection<GrantedAuthority> convert(Jwt jwt) { + log.debug("JWT: {}", jwt.getTokenValue()); + Set<GrantedAuthority> collect = getAuthorities(jwt).stream() + .flatMap(authority -> { + if (roleAuthoritiesMapping.isEmpty() && groupAuthoritiesMapping.isEmpty()) { + return Stream.of(authority); + } + Stream<String> s1 = roleAuthoritiesMapping.entrySet().stream() + .filter(entry -> entry.getValue().equals(authority)) + .map(entry -> entry.getKey()).distinct(); + Stream<String> s2 = groupAuthoritiesMapping.entrySet().stream() + .filter(entry -> entry.getValue().equals(authority)) + .map(entry -> entry.getKey()).distinct(); + return Stream.concat(s1, s2); + }) + .distinct() + .map(authority -> new SimpleGrantedAuthority(this.authorityPrefix + authority)) + .collect(Collectors.toSet()); + log.debug("JWT granted: {}", collect); + return collect; + } + + /** + * Sets the prefix to use for {@link GrantedAuthority authorities} mapped by this converter. + * Defaults to {@code SCOPE_}. + * + * @param authorityPrefix The authority prefix + */ + public void setAuthorityPrefix(String authorityPrefix) { + Assert.notNull(authorityPrefix, "authorityPrefix cannot be null"); + this.authorityPrefix = authorityPrefix; + } + + /** + * Sets the name of the token claim to use for mapping {@link GrantedAuthority + * authorities} by this converter. Defaults to the well-known scope claim names + * {@code scope} and {@code scp}. + * + * @param authoritiesClaimName The token claim name to map authorities + */ + public void setAuthoritiesClaimName(String authoritiesClaimName) { + Assert.hasText(authoritiesClaimName, "authoritiesClaimName cannot be empty"); + this.authoritiesClaimName = authoritiesClaimName; + } + + /** + * Set the mapping from resolved authorities from the jwt into granted authorities. + * + * @param authoritiesMapping the authoritiesMapping to set + */ + public void setAuthoritiesMapping(Map<String, String> authoritiesMapping) { + Assert.notNull(authoritiesMapping, "authoritiesMapping cannot be null"); + this.roleAuthoritiesMapping = authoritiesMapping; + } + + /** + * Sets the name of the token claim to use for group mapping {@link GrantedAuthority + * authorities} by this converter. Defaults to the well-known group claim names + * {@code groups} and {@code roles}. + * + * @param groupAuthoritiesClaimName the token claim name to map group + * authorities + */ + public void setGroupAuthoritiesClaimName(String groupAuthoritiesClaimName) { + this.groupAuthoritiesClaimName = groupAuthoritiesClaimName; + } + + /** + * Set the group mapping from resolved authorities from the jwt into granted + * authorities.
+ * + * @param groupAuthoritiesMapping the groupAuthoritiesMapping to set + */ + public void setGroupAuthoritiesMapping(Map<String, String> groupAuthoritiesMapping) { + this.groupAuthoritiesMapping = groupAuthoritiesMapping; + } + + private String getAuthoritiesClaimName(Jwt jwt) { + if (this.authoritiesClaimName != null) { + return this.authoritiesClaimName; + } + for (String claimName : WELL_KNOWN_SCOPES_CLAIM_NAMES) { + if (jwt.hasClaim(claimName)) { + return claimName; + } + } + return null; + } + + private String getGroupAuthoritiesClaimName(Jwt jwt) { + if (this.groupAuthoritiesClaimName != null) { + return this.groupAuthoritiesClaimName; + } + for (String claimName : WELL_KNOWN_GROUPS_CLAIM_NAMES) { + if (jwt.hasClaim(claimName)) { + return claimName; + } + } + return null; + } + + private Collection<String> getAuthorities(Jwt jwt) { + String scopeClaimName = getAuthoritiesClaimName(jwt); + String groupClaimName = getGroupAuthoritiesClaimName(jwt); + + List<String> claimAsStringList1 = null; + List<String> claimAsStringList2 = null; + + // Spring Security converts array-typed claims incorrectly, so skip those here + if (scopeClaimName != null && !ObjectUtils.isArray(jwt.getClaim(scopeClaimName))) { + claimAsStringList1 = jwt.getClaimAsStringList(scopeClaimName); + } + if (groupClaimName != null && !ObjectUtils.isArray(jwt.getClaim(groupClaimName))) { + claimAsStringList2 = jwt.getClaimAsStringList(groupClaimName); + } + + List<String> claimAsStringList = new ArrayList<>(); + if (claimAsStringList1 != null) { + List<String> collect = claimAsStringList1.stream() + .flatMap(c -> Arrays.stream(c.split(" "))) + .filter(c -> StringUtils.hasText(c)) + .collect(Collectors.toList()); + claimAsStringList.addAll(collect); + } + if (claimAsStringList2 != null) { + claimAsStringList.addAll(claimAsStringList2); + } + return claimAsStringList; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java new file mode 100644 index 0000000000..c5ad6f25af --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java @@ -0,0 +1,38 @@ +/* + * Copyright 2016-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import org.springframework.boot.autoconfigure.condition.NoneNestedConditions; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.Conditional; + +/** + * {@link Condition} that matches only when OAuth2 security is not enabled, that is, + * when {@link OnOAuth2SecurityEnabled} does not match.
+ * + * @author Gunnar Hillert + * @since 1.1.0 + */ +public class OnOAuth2SecurityDisabled extends NoneNestedConditions { + + public OnOAuth2SecurityDisabled() { + super(ConfigurationPhase.REGISTER_BEAN); + } + + @Conditional(OnOAuth2SecurityEnabled.class) + static class OAuthEnabled { + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java new file mode 100644 index 0000000000..fbd0c656b3 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Collections; +import java.util.Map; + +import org.springframework.boot.autoconfigure.condition.ConditionOutcome; +import org.springframework.boot.autoconfigure.condition.SpringBootCondition; +import org.springframework.boot.context.properties.bind.Bindable; +import org.springframework.boot.context.properties.bind.Binder; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.env.Environment; +import org.springframework.core.type.AnnotatedTypeMetadata; + +/** + * {@link Condition} that is only valid if at least one property underneath + * {@code spring.security.oauth2} exists. 
+ * + * @author Gunnar Hillert + * @since 1.1.0 + */ +public class OnOAuth2SecurityEnabled extends SpringBootCondition { + + @Override + public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) { + Map<String, String> properties = getSubProperties(context.getEnvironment(), "spring.security.oauth2"); + return new ConditionOutcome(!properties.isEmpty(), "OAuth2 Enabled"); + } + + public static Map<String, String> getSubProperties(Environment environment, String keyPrefix) { + return Binder.get(environment) + .bind(keyPrefix, Bindable.mapOf(String.class, String.class)) + .orElseGet(Collections::emptyMap); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java new file mode 100644 index 0000000000..4fb9e18b45 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Map; + +/** + * Strategy interface for extracting the principal from a map of user attributes. + * + * @author Gunnar Hillert + * @since 1.3.0 + * + */ +public interface PrincipalExtractor { + + Object extractPrincipal(Map<String, Object> map); +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java new file mode 100644 index 0000000000..272242a8f0 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java @@ -0,0 +1,76 @@ +/* + * Copyright 2017-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
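
A sketch of how the two conditions are meant to be attached; the configuration class names here are hypothetical. OnOAuth2SecurityEnabled matches as soon as any property underneath spring.security.oauth2 is bound, and OnOAuth2SecurityDisabled is its negation via NoneNestedConditions:

	import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled;
	import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled;
	import org.springframework.context.annotation.Conditional;
	import org.springframework.context.annotation.Configuration;

	@Configuration
	@Conditional(OnOAuth2SecurityEnabled.class)
	class OAuthSecurityConfiguration {
		// beans registered only when e.g. spring.security.oauth2.client.registration.uaa.client-id is set
	}

	@Configuration
	@Conditional(OnOAuth2SecurityDisabled.class)
	class NoSecurityConfiguration {
		// fallback beans registered when no spring.security.oauth2.* property exists
	}
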
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.common.security.AuthorizationProperties; +import org.springframework.http.HttpMethod; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Utility constants and helper methods for configuring security. + * + * @author Gunnar Hillert + */ +public class SecurityConfigUtils { + + private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecurityConfigUtils.class); + + public static final String ROLE_PREFIX = "ROLE_"; + + public static final Pattern AUTHORIZATION_RULE; + + public static final String BASIC_AUTH_REALM_NAME = "Spring"; + + static { + String methodsRegex = StringUtils.arrayToDelimitedString(HttpMethod.values(), "|"); + AUTHORIZATION_RULE = Pattern.compile("(" + methodsRegex + ")\\s+(.+)\\s+=>\\s+(.+)"); + } + + /** + * Read the configuration for "simple" (that is, not ACL based) security and apply it. + * + * @param security The ExpressionInterceptUrlRegistry to apply the authorization rules to + * @param authorizationProperties Contains the rules to configure authorization + * + * @return the configured ExpressionInterceptUrlRegistry + */ + public static ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry configureSimpleSecurity( + ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry security, + AuthorizationProperties authorizationProperties) { + for (String rule : authorizationProperties.getRules()) { + Matcher matcher = AUTHORIZATION_RULE.matcher(rule); + Assert.isTrue(matcher.matches(), + String.format("Unable to parse security rule [%s], expected format is 'HTTP_METHOD ANT_PATTERN => " + + "SECURITY_ATTRIBUTE(S)'", rule)); + + HttpMethod method = HttpMethod.valueOf(matcher.group(1).trim()); + String urlPattern = matcher.group(2).trim(); + String attribute = matcher.group(3).trim(); + + logger.info("Authorization '{}' | '{}' | '{}'", method, attribute, urlPattern); + security = security.antMatchers(method, urlPattern).access(attribute); + } + return security; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java new file mode 100644 index 0000000000..2641ce9f63 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java @@ -0,0 +1,39 @@ +/* + * Copyright 2017-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
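
The authorization rules consumed by configureSimpleSecurity follow the format HTTP_METHOD ANT_PATTERN => SECURITY_ATTRIBUTE(S). A small sketch of how AUTHORIZATION_RULE decomposes one such rule; the rule value itself is made up for illustration:

	import java.util.regex.Matcher;

	import org.springframework.cloud.common.security.support.SecurityConfigUtils;

	public class AuthorizationRuleExample {

		public static void main(String[] args) {
			Matcher matcher = SecurityConfigUtils.AUTHORIZATION_RULE
					.matcher("POST /tasks/definitions => hasRole('ROLE_CREATE')");
			if (matcher.matches()) {
				System.out.println(matcher.group(1)); // "POST" -> the HTTP method
				System.out.println(matcher.group(2)); // "/tasks/definitions" -> the Ant pattern
				System.out.println(matcher.group(3)); // "hasRole('ROLE_CREATE')" -> the security attribute
			}
		}
	}
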
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +/** + * State-holder for computed security meta-information. + * + * @author Gunnar Hillert + */ +public class SecurityStateBean { + + private boolean authenticationEnabled; + + public SecurityStateBean() { + super(); + } + + public boolean isAuthenticationEnabled() { + return authenticationEnabled; + } + + public void setAuthenticationEnabled(boolean authenticationEnabled) { + this.authenticationEnabled = authenticationEnabled; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000..0a8aad951c --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories @@ -0,0 +1,2 @@ +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.springframework.cloud.common.security.CommonSecurityAutoConfiguration diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java new file mode 100644 index 0000000000..f39a46fe35 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
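
SecurityStateBean above is a plain holder whose flag is presumably set while security is being configured; a minimal hypothetical consumer:

	import org.springframework.cloud.common.security.support.SecurityStateBean;

	public class SecurityInfoExample {

		private final SecurityStateBean securityStateBean;

		public SecurityInfoExample(SecurityStateBean securityStateBean) {
			this.securityStateBean = securityStateBean;
		}

		public String describe() {
			// Reads back the flag, e.g. for an info or about endpoint.
			return this.securityStateBean.isAuthenticationEnabled()
					? "authentication enabled" : "authentication disabled";
		}
	}
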
+ */ +package org.springframework.cloud.common.security; + +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.util.TestPropertyValues; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +import static org.assertj.core.api.Assertions.assertThat; + +public class OnOAuth2SecurityDisabledTests { + + @Test + public void noPropertySet() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class); + assertThat(context.containsBean("myBean")).isTrue(); + context.close(); + } + + @Test + public void propertyClientIdSet() throws Exception { + AnnotationConfigApplicationContext context = + load(Config.class, "spring.security.oauth2.client.registration.uaa.client-id:12345"); + assertThat(context.containsBean("myBean")).isFalse(); + context.close(); + } + + private AnnotationConfigApplicationContext load(Class config, String... env) { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); + TestPropertyValues.of(env).applyTo(context); + context.register(config); + context.refresh(); + return context; + } + + @Configuration + @Conditional(OnOAuth2SecurityDisabled.class) + public static class Config { + @Bean + public String myBean() { + return "myBean"; + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java new file mode 100644 index 0000000000..4bcfe1789c --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2016-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.util.TestPropertyValues; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +/** + * @author Gunnar Hillert + */ +public class OnOAuth2SecurityEnabledTests { + + @Test + public void noPropertySet() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class); + assertThat(context.containsBean("myBean")).isFalse(); + context.close(); + } + + @Test + public void propertySecurityOauth() throws Exception { + assertThatThrownBy(() -> { + load(Config.class, "spring.security.oauth2"); + }).isInstanceOf(IllegalStateException.class); + } + + @Test + public void propertyClientId() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class, + "spring.security.oauth2.client.registration.uaa.client-id:12345"); + assertThat(context.containsBean("myBean")).isTrue(); + context.close(); + } + + @Test + public void clientIdOnlyWithNoValue() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class, + "spring.security.oauth2.client.registration.uaa.client-id"); + assertThat(context.containsBean("myBean")).isTrue(); + context.close(); + } + + private AnnotationConfigApplicationContext load(Class config, String... env) { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); + TestPropertyValues.of(env).applyTo(context); + context.register(config); + context.refresh(); + return context; + } + + @Configuration + @Conditional(OnOAuth2SecurityEnabled.class) + public static class Config { + @Bean + public String myBean() { + return "myBean"; + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java new file mode 100644 index 0000000000..fde46cb93f --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java @@ -0,0 +1,264 @@ +/* + * Copyright 2017-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
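
A condensed illustration of the default scope-to-role mapping exercised by the tests that follow; the scope values mirror those tests:

	import java.util.HashSet;
	import java.util.Set;

	import org.springframework.cloud.common.security.support.DefaultAuthoritiesMapper;
	import org.springframework.security.core.GrantedAuthority;

	public class ScopeMappingExample {

		public static void main(String[] args) {
			Set<String> scopes = new HashSet<>();
			scopes.add("dataflow.view");
			scopes.add("dataflow.create");

			// mapOAuthScopes=true: only scopes actually present on the token become roles.
			DefaultAuthoritiesMapper mapper = new DefaultAuthoritiesMapper("uaa", true);
			Set<GrantedAuthority> authorities = mapper.mapScopesToAuthorities("uaa", scopes, null);
			// authorities now contains ROLE_VIEW and ROLE_CREATE
		}
	}
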
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.common.security.ProviderRoleMapping; +import org.springframework.security.core.GrantedAuthority; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +/** + * @author Gunnar Hillert + */ +public class DefaultAuthoritiesMapperTests { + + @Test + public void testNullConstructor() throws Exception { + assertThatThrownBy(() -> { + new DefaultAuthoritiesMapper(null, ""); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("providerRoleMappings must not be null."); + } + + @Test + public void testMapScopesToAuthoritiesWithNullParameters() throws Exception { + DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper(Collections.emptyMap(), ""); + + assertThatThrownBy(() -> { + authoritiesMapper.mapScopesToAuthorities(null, null, null); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("The scopes argument must not be null."); + assertThatThrownBy(() -> { + authoritiesMapper.mapScopesToAuthorities("myClientId", null, null); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("The scopes argument must not be null."); + } + + @Test + public void testThat7AuthoritiesAreReturned() throws Exception { + DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper("uaa", false); + Set authorities = authoritiesMapper.mapScopesToAuthorities("uaa", Collections.emptySet(), null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW", "ROLE_DEPLOY", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_DESTROY"); + } + + @Test + public void testEmptyMapConstructor() throws Exception { + Set scopes = new HashSet<>(); + scopes.add("dataflow.manage"); + scopes.add("dataflow.view"); + scopes.add("dataflow.create"); + + DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper("uaa", true); + Collection authorities = authoritiesMapper.mapScopesToAuthorities("uaa", scopes, null); + + assertThat(authorities).hasSize(3); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW"); + } + + @Test + public void testMapConstructorWithIncompleteRoleMappings() throws Exception { + ProviderRoleMapping roleMapping = new ProviderRoleMapping(); + roleMapping.setMapOauthScopes(true); + roleMapping.addRoleMapping("ROLE_MANAGE", "foo-scope-in-oauth"); + assertThatThrownBy(() -> { + new DefaultAuthoritiesMapper("uaa", roleMapping); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining( + "The following 6 roles are not mapped: CREATE, DEPLOY, DESTROY, MODIFY, SCHEDULE, VIEW."); + } + + @Test + public void testThat7MappedAuthoritiesAreReturned() throws Exception { + Map roleMappings = new HashMap<>(); + roleMappings.put("ROLE_MANAGE", "foo-manage"); + roleMappings.put("ROLE_VIEW", "bar-view"); + roleMappings.put("ROLE_CREATE", "blubba-create"); + roleMappings.put("ROLE_MODIFY", "foo-modify"); + roleMappings.put("ROLE_DEPLOY", "foo-deploy"); 
+ roleMappings.put("ROLE_DESTROY", "foo-destroy"); + roleMappings.put("ROLE_SCHEDULE", "foo-schedule"); + + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + providerRoleMapping.getRoleMappings().putAll(roleMappings); + + Set scopes = new HashSet<>(); + scopes.add("foo-manage"); + scopes.add("bar-view"); + scopes.add("blubba-create"); + scopes.add("foo-modify"); + scopes.add("foo-deploy"); + scopes.add("foo-destroy"); + scopes.add("foo-schedule"); + + DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + public void testThat3MappedAuthoritiesAreReturnedForDefaultMapping() throws Exception { + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + + Set scopes = new HashSet<>(); + scopes.add("dataflow.manage"); + scopes.add("dataflow.view"); + scopes.add("dataflow.create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(3); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW"); + } + + @Test + public void testThat7MappedAuthoritiesAreReturnedForDefaultMappingWithoutMappingScopes() throws Exception { + Set scopes = new HashSet<>(); + scopes.add("dataflow.manage"); + scopes.add("dataflow.view"); + scopes.add("dataflow.create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", false); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + public void testThat2MappedAuthoritiesAreReturnedForDefaultMapping() throws Exception { + Set scopes = new HashSet<>(); + scopes.add("dataflow.view"); + scopes.add("dataflow.create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(2); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_VIEW"); + } + + @Test + public void testThat7AuthoritiesAreReturnedAndOneOAuthScopeCoversMultipleServerRoles() throws Exception { + Map roleMappings = new HashMap<>(); + roleMappings.put("ROLE_MANAGE", "foo-manage"); + roleMappings.put("ROLE_VIEW", "foo-manage"); + roleMappings.put("ROLE_DEPLOY", "foo-manage"); + roleMappings.put("ROLE_DESTROY", "foo-manage"); + roleMappings.put("ROLE_MODIFY", 
"foo-manage"); + roleMappings.put("ROLE_SCHEDULE", "foo-manage"); + roleMappings.put("ROLE_CREATE", "blubba-create"); + + Set scopes = new HashSet<>(); + scopes.add("foo-manage"); + scopes.add("blubba-create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true, roleMappings); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + public void testThatUriStyleScopeRemovesLeadingPart() throws Exception { + Map roleMappings = new HashMap<>(); + roleMappings.put("ROLE_MANAGE", "foo-manage"); + roleMappings.put("ROLE_VIEW", "foo-manage"); + roleMappings.put("ROLE_DEPLOY", "foo-manage"); + roleMappings.put("ROLE_DESTROY", "foo-manage"); + roleMappings.put("ROLE_MODIFY", "foo-manage"); + roleMappings.put("ROLE_SCHEDULE", "foo-manage"); + roleMappings.put("ROLE_CREATE", "blubba-create"); + + Set scopes = new HashSet<>(); + scopes.add("api://foobar/foo-manage"); + scopes.add("blubba-create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true, roleMappings); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + public void testThatUriStyleScopeParsingCanBeDisabled() throws Exception { + Map roleMappings = new HashMap<>(); + roleMappings.put("ROLE_MANAGE", "/ROLE/2000803042"); + roleMappings.put("ROLE_VIEW", "/ROLE/2000803036"); + roleMappings.put("ROLE_DEPLOY", "/ROLE/2000803039"); + roleMappings.put("ROLE_DESTROY", "/ROLE/20008030340"); + roleMappings.put("ROLE_MODIFY", "/ROLE/2000803037"); + roleMappings.put("ROLE_SCHEDULE", "/ROLE/2000803038"); + roleMappings.put("ROLE_CREATE", "/ROLE/2000803041"); + + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + providerRoleMapping.setParseOauthScopePathParts(false); + providerRoleMapping.getRoleMappings().putAll(roleMappings); + + Set scopes = new HashSet<>(); + scopes.add("/ROLE/2000803042"); + scopes.add("/ROLE/2000803036"); + scopes.add("/ROLE/2000803039"); + scopes.add("/ROLE/20008030340"); + scopes.add("/ROLE/2000803037"); + scopes.add("/ROLE/2000803038"); + scopes.add("/ROLE/2000803041"); + + DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java 
b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java new file mode 100644 index 0000000000..a303a2da44 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.io.IOException; +import java.net.URI; +import java.util.HashSet; +import java.util.Set; + +import com.fasterxml.jackson.databind.ObjectMapper; +import mockwebserver3.MockResponse; +import mockwebserver3.MockWebServer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; + +import static org.assertj.core.api.Assertions.assertThat; + + +/** + * @author Mike Heath + * @author Gunnar Hillert + * @author Corneil du Plessis + */ +public class ExternalOauth2ResourceAuthoritiesMapperTests { + + public MockWebServer mockBackEnd; + + + @BeforeEach + public void setUp() throws IOException { + mockBackEnd = new MockWebServer(); + mockBackEnd.start(); + } + @AfterEach + public void tearDown() throws IOException { + mockBackEnd.shutdown(); + } + + + @Test + public void testExtractAuthorities() throws Exception { + assertAuthorities2(mockBackEnd.url("/authorities").uri(), "VIEW"); + assertAuthorities2(mockBackEnd.url("/authorities").uri(), "VIEW", "CREATE", "MANAGE"); + assertAuthorities2(mockBackEnd.url("/").uri(), "MANAGE"); + assertAuthorities2(mockBackEnd.url("/").uri(), "DEPLOY", "DESTROY", "MODIFY", "SCHEDULE"); + assertThat(mockBackEnd.getRequestCount()).isEqualTo(4); + } + + private void assertAuthorities2(URI uri, String... 
roles) throws Exception { + ObjectMapper objectMapper = new ObjectMapper(); + mockBackEnd.enqueue(new MockResponse().newBuilder() + .body(objectMapper.writeValueAsString(roles)) + .addHeader("Content-Type", "application/json").build()); + + final ExternalOauth2ResourceAuthoritiesMapper authoritiesExtractor = + new ExternalOauth2ResourceAuthoritiesMapper(uri); + final Set grantedAuthorities = authoritiesExtractor.mapScopesToAuthorities(null, new HashSet<>(), "1234567"); + for (String role : roles) { + assertThat(grantedAuthorities).containsAnyOf(new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + role)); + } + assertThat(mockBackEnd.takeRequest().getHeaders().get("Authorization")).isEqualTo("Bearer 1234567"); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java new file mode 100644 index 0000000000..ac5fb55274 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java @@ -0,0 +1,271 @@ +/* + * Copyright 2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.common.security.support; + +import java.time.Instant; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.jwt.Jwt; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Tests for {@link MappingJwtGrantedAuthoritiesConverter} + * + */ +public class MappingJwtGrantedAuthoritiesConverterTests { + + public static Jwt.Builder jwt() { + return Jwt.withTokenValue("token") + .header("alg", "none") + .audience(Arrays.asList("https://audience.example.org")) + .expiresAt(Instant.MAX) + .issuedAt(Instant.MIN) + .issuer("https://issuer.example.org") + .jti("jti") + .notBefore(Instant.MIN) + .subject("mock-test-subject"); + } + + public static Jwt user() { + return jwt() + .claim("sub", "mock-test-subject") + .build(); + } + + @Test + public void convertWhenTokenHasScopeAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("SCOPE_message:read"), + new SimpleGrantedAuthority("SCOPE_message:write")); + } + + @Test + public void convertWithCustomAuthorityPrefixWhenTokenHasScopeAttributeThenTranslatedToAuthoritiesViaMapping() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_"); + Map authoritiesMapping = new HashMap<>(); + authoritiesMapping.put("READ", "message:read"); + authoritiesMapping.put("WRITE", "message:write"); + jwtGrantedAuthoritiesConverter.setAuthoritiesMapping(authoritiesMapping); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactly( + new SimpleGrantedAuthority("ROLE_READ"), + new SimpleGrantedAuthority("ROLE_WRITE")); + } + + @Test + public void convertWithCustomAuthorityWhenTokenHasScopeAttributeThenTranslatedToAuthoritiesViaMapping() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthorityPrefix(""); + Map authoritiesMapping = new HashMap<>(); + authoritiesMapping.put("ROLE_READ", "message:read"); + authoritiesMapping.put("ROLE_WRITE", "message:write"); + jwtGrantedAuthoritiesConverter.setAuthoritiesMapping(authoritiesMapping); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactly( + new SimpleGrantedAuthority("ROLE_READ"), + new SimpleGrantedAuthority("ROLE_WRITE")); + } + + @Test + public void convertWithCustomAuthorityPrefixWhenTokenHasScopeAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + 
jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_"); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("ROLE_message:read"), + new SimpleGrantedAuthority("ROLE_message:write")); + } + + @Test + public void convertWhenTokenHasEmptyScopeAttributeThenTranslatedToNoAuthorities() { + Jwt jwt = jwt().claim("scope", "").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasScpAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scp", Arrays.asList("message:read", "message:write")).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("SCOPE_message:read"), + new SimpleGrantedAuthority("SCOPE_message:write")); + } + + @Test + public void convertWithCustomAuthorityPrefixWhenTokenHasScpAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scp", Arrays.asList("message:read", "message:write")).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_"); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("ROLE_message:read"), + new SimpleGrantedAuthority("ROLE_message:write")); + } + + @Test + public void convertWhenTokenHasEmptyScpAttributeThenTranslatedToNoAuthorities() { + Jwt jwt = jwt().claim("scp", Collections.emptyList()).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasBothScopeAndScpThenScopeAttributeIsTranslatedToAuthorities() { + Jwt jwt = jwt() + .claim("scp", Arrays.asList("message:read", "message:write")) + .claim("scope", "missive:read missive:write") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactly( + new SimpleGrantedAuthority("SCOPE_missive:read"), + new SimpleGrantedAuthority("SCOPE_missive:write")); + } + + @Test + public void convertWhenTokenHasEmptyScopeAndNonEmptyScpThenScopeAttributeIsTranslatedToNoAuthorities() { + Jwt jwt = jwt() + .claim("scp", Arrays.asList("message:read", "message:write")) + .claim("scope", "") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasEmptyScopeAndEmptyScpAttributeThenTranslatesToNoAuthorities() { + Jwt jwt = jwt() + .claim("scp", Collections.emptyList()) + .claim("scope", Collections.emptyList()) + .build(); + + MappingJwtGrantedAuthoritiesConverter 
jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasNoScopeAndNoScpAttributeThenTranslatesToNoAuthorities() { + Jwt jwt = jwt().claim("xxx", Arrays.asList("message:read", "message:write")).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasUnsupportedTypeForScopeThenTranslatesToNoAuthorities() { + Jwt jwt = jwt().claim("scope", new String[] {"message:read", "message:write"}).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToAuthorities() { + Jwt jwt = jwt() + .claim("xxx", Arrays.asList("message:read", "message:write")) + .claim("scope", "missive:read missive:write") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("xxx"); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("SCOPE_message:read"), + new SimpleGrantedAuthority("SCOPE_message:write")); + } + + @Test + public void convertWhenTokenHasEmptyCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToNoAuthorities() { + Jwt jwt = jwt() + .claim("roles", Collections.emptyList()) + .claim("scope", "missive:read missive:write") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("roles"); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasNoCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToNoAuthorities() { + Jwt jwt = jwt().claim("scope", "missive:read missive:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("roles"); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasGroupClaims() { + Jwt jwt = jwt().claim("groups", Arrays.asList("role1")).build(); + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + assertThat(authorities).containsExactlyInAnyOrder(new SimpleGrantedAuthority("SCOPE_role1")); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java new file 
mode 100644 index 0000000000..d0aa68a8c2 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.time.Instant; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; +import org.springframework.security.oauth2.client.registration.ClientRegistration; +import org.springframework.security.oauth2.core.AuthorizationGrantType; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.OAuth2AccessToken.TokenType; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * + * @author Gunnar Hillert + * + */ +public class OAuth2TokenUtilsServiceTests { + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithNoAuthentication() { + SecurityContextHolder.getContext().setAuthentication(null); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class).hasMessageContaining( + "Cannot retrieve the authentication object from the SecurityContext. 
Are you authenticated?"); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithWrongAuthentication() { + final Authentication authentication = mock(Authentication.class); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class).hasMessageContaining("Unsupported authentication object type"); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithEmptyPrincipalName() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn(""); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn("uaa"); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("The retrieved principalName must not be null or empty."); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithEmptyClientRegistrationId() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn("FOO"); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn(""); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("The retrieved clientRegistrationId must not be null or empty."); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithWrongClientRegistrationId() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn("my-username"); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn("CID"); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + when(oauth2AuthorizedClientService.loadAuthorizedClient("uaa", "my-username")).thenReturn(getOAuth2AuthorizedClient()); + final OAuth2TokenUtilsService oauth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class).hasMessageContaining( + "No oauth2AuthorizedClient returned for 
clientRegistrationId 'CID' and principalName 'my-username'."); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithAuthentication() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn("my-username"); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn("uaa"); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + when(oauth2AuthorizedClientService.loadAuthorizedClient("uaa", "my-username")).thenReturn(getOAuth2AuthorizedClient()); + final OAuth2TokenUtilsService oauth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThat(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).isEqualTo("foo-bar-123-token"); + SecurityContextHolder.getContext().setAuthentication(null); + } + + private OAuth2AuthorizedClient getOAuth2AuthorizedClient() { + final ClientRegistration clientRegistration = ClientRegistration + .withRegistrationId("uaa") + .clientId("clientId") + .clientSecret("clientSecret") + .redirectUri("blubba") + .authorizationUri("blubba") + .tokenUri("blubba") + .authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE) + .build(); + final OAuth2AccessToken accessToken = new OAuth2AccessToken(TokenType.BEARER, "foo-bar-123-token", Instant.now(), Instant.now().plusMillis(100000)); + final OAuth2AuthorizedClient authorizedClient = new OAuth2AuthorizedClient(clientRegistration, "my-username", accessToken); + return authorizedClient; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml new file mode 100644 index 0000000000..26784c8560 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml @@ -0,0 +1,68 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-common-security-config + 2.11.6-SNAPSHOT + + spring-cloud-starter-common-security-config-web + spring-cloud-starter-common-security-config-web + Spring Cloud Starter Common Security Config Web + pom + + true + 5.0.0-alpha.14 + + + + org.springframework.cloud + spring-cloud-common-security-config-web + ${project.version} + + + org.springframework.boot + spring-boot-starter-web + test + + + org.springframework.boot + spring-boot-starter-test + test + + + com.squareup.okhttp3 + mockwebserver3-junit5 + ${okhttp3.version} + test + + + com.squareup.okhttp3 + okhttp + ${okhttp3.version} + test + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + 1.8.22 + test + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.3.0 + + + + test-jar + + + + + + + diff --git a/spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java similarity index 61% rename from spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java rename to 
spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java index 71c096cc1d..df2f761d8e 100644 --- a/spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,17 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.springframework.cloud.common.security; -package com.acme.common; +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.context.SpringBootTest; /** - * An enum class used in {@link ConfigProperties}. Useful to test, because this class has - * to be accessible to the ClassLoader used to retrieve metadata. + * Testing startup and configuration * - * @author Eric Bottard + * @author Corneil du Plessis */ -public enum SomeEnum { - one, - two, - three; +@SpringBootTest +class SpringCloudCommonSecurityApplicationTests { + + @Test + void contextLoads() { + } + } diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java new file mode 100644 index 0000000000..08c8855d75 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java @@ -0,0 +1,58 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import java.security.Principal; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.actuate.autoconfigure.metrics.MetricsAutoConfiguration; +import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration; +import org.springframework.boot.autoconfigure.security.servlet.UserDetailsServiceAutoConfiguration; +import org.springframework.boot.autoconfigure.session.SessionAutoConfiguration; +import org.springframework.context.annotation.Import; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * Minimal application to verify configuration + * + * @author Corneil du Plessis + */ +@SpringBootApplication(exclude = { + MetricsAutoConfiguration.class, + ManagementWebSecurityAutoConfiguration.class, + SecurityAutoConfiguration.class, + UserDetailsServiceAutoConfiguration.class, + SessionAutoConfiguration.class +}) + +@Import({CommonSecurityAutoConfiguration.class, TestOAuthSecurityConfiguration.class}) +public class SpringCloudCommonSecurityTestApplication { + + public static void main(String[] args) { + SpringApplication.run(SpringCloudCommonSecurityTestApplication.class, args); + } + + @RestController + public static class SimpleController { + @GetMapping("/user") + public String getUser(Principal principal) { + return principal.getName(); + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java new file mode 100644 index 0000000000..0b1b2ea2e8 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; +import org.springframework.cloud.common.security.support.SecurityStateBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; + +/** + * We need to mimic the configuration of Dataflow and Skipper + * + * @author Corneil du Plessis + */ +@Configuration(proxyBeanMethods = false) +@Conditional(OnOAuth2SecurityEnabled.class) +@Import(TestOAuthSecurityConfiguration.SecurityStateBeanConfig.class) +public class TestOAuthSecurityConfiguration extends OAuthSecurityConfiguration { + + @Configuration(proxyBeanMethods = false) + public static class SecurityStateBeanConfig { + @Bean + public SecurityStateBean securityStateBean() { + return new SecurityStateBean(); + } + + @Bean + @ConfigurationProperties(prefix = "spring.cloud.common.security.test.authorization") + public AuthorizationProperties authorizationProperties() { + return new AuthorizationProperties(); + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml new file mode 100644 index 0000000000..e5de703119 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml @@ -0,0 +1,40 @@ +logging: +# file: +# name: sccsc-test.log + level: + org.springframework: DEBUG +spring: + security: + oauth2: + client: + registration: + uaa: + redirect-uri: '{baseUrl}/login/oauth2/code/{registrationId}' + authorization-grant-type: authorization_code + client-id: myclient + client-secret: mysecret + access-token-uri: http://127.0.0.1:8888/oauth/token + user-authorization-uri: http://127.0.0.1:8888/oauth/authorize + provider: + uaa: + authorization-uri: http://127.0.0.1:8888/oauth/authorize + user-info-uri: http://127.0.0.1:8888/me + token-uri: http://127.0.0.1:8888/oauth/token + resourceserver: + opaquetoken: + introspection-uri: http://127.0.0.1:8888/oauth/check_token + client-id: myclient + client-secret: mysecret + cloud: + common: + security: + test: + authorization: + check-token-access: isAuthenticated() + authorization: + enabled: true + permit-all-paths: "/user,./assets/**,/dashboard/logout-success-oauth.html" + authenticated-paths: "/user" + rules: + # User + - GET /user => hasRole('ROLE_VIEW') diff --git a/spring-cloud-dataflow-aggregate-task/README.adoc b/spring-cloud-dataflow-aggregate-task/README.adoc new file mode 100644 index 0000000000..54ea33b3a3 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/README.adoc @@ -0,0 +1,10 @@ += Spring Cloud Dataflow Aggregate Task Module + +Spring Cloud Task and Spring Batch utilize a series of database tables to support storing data about Boot Application executions as well as Job executions. +For each major release of these projects, their database schemas adjust to meet the needs for the latest release. +SCDF supports applications that may use the current release of these projects as well as a previous release. 
+The `spring-cloud-dataflow-aggregate-task` module provides support for dataflow to query and mutate data in each of the schema versions. + +== Tests + +The tests for this module are located in the `spring-cloud-dataflow-server` module \ No newline at end of file diff --git a/spring-cloud-dataflow-aggregate-task/pom.xml b/spring-cloud-dataflow-aggregate-task/pom.xml new file mode 100644 index 0000000000..88f9fe8db9 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/pom.xml @@ -0,0 +1,110 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-aggregate-task + spring-cloud-dataflow-aggregate-task + Spring Cloud Data Flow Aggregate Task + + jar + + true + 3.4.1 + + + + org.springframework + spring-core + + + org.springframework + spring-context + compile + + + org.springframework.cloud + spring-cloud-task-batch + + + org.springframework.cloud + spring-cloud-dataflow-core + ${project.version} + + + org.springframework.cloud + spring-cloud-dataflow-registry + ${project.version} + + + org.springframework.cloud + spring-cloud-dataflow-schema + ${project.version} + + + org.slf4j + slf4j-api + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.springframework.boot + spring-boot-starter-test + test + + + com.h2database + h2 + test + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + false + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + + diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java new file mode 100644 index 0000000000..3f8d12ab7d --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java @@ -0,0 +1,67 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task; + +import org.springframework.cloud.dataflow.core.AppRegistration; +import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.task.repository.TaskExecution; + +/** + * Allows users to retrieve Task execution and SchemaVersion information from either {@link TaskExecution} as well as + * Task Name. 
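+ * <p>
+ * A minimal usage sketch (hypothetical caller code; {@code aggregateExecutionSupport},
+ * {@code taskDefinitionReader} and {@code taskDeploymentReader} are assumed, injected
+ * collaborators used for illustration only):
+ * <pre>{@code
+ * SchemaVersionTarget target = aggregateExecutionSupport
+ *         .findSchemaVersionTarget("my-task", taskDefinitionReader);
+ * AggregateTaskExecution aggregate = aggregateExecutionSupport
+ *         .from(taskExecution, taskDefinitionReader, taskDeploymentReader);
+ * }</pre>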
+ * @author Corneil du Plessis
+ */
+public interface AggregateExecutionSupport {
+
+	/**
+	 * Retrieves the {@link AggregateTaskExecution} for the task execution and {@link TaskDefinitionReader} provided.
+	 * @param execution A {@link TaskExecution} that contains the task name that will be used to find the {@link AggregateTaskExecution}.
+	 * @param taskDefinitionReader {@link TaskDefinitionReader} that will be used to find the {@link SchemaVersionTarget} for the task execution.
+	 * @param taskDeploymentReader {@link TaskDeploymentReader} that will be used to read the deployment.
+	 * @return The {@link AggregateTaskExecution} containing the {@link SchemaVersionTarget} for the TaskExecution.
+	 */
+	AggregateTaskExecution from(TaskExecution execution, TaskDefinitionReader taskDefinitionReader, TaskDeploymentReader taskDeploymentReader);
+
+	/**
+	 * Retrieves the {@link SchemaVersionTarget} for the task name.
+	 * @param taskName The name of the {@link org.springframework.cloud.dataflow.core.TaskDefinition} from which the {@link SchemaVersionTarget} will be retrieved.
+	 * @param taskDefinitionReader {@link TaskDefinitionReader} that will be used to find the {@link SchemaVersionTarget}
+	 * @return The {@link SchemaVersionTarget} for the taskName specified.
+	 */
+	SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinitionReader taskDefinitionReader);
+	SchemaVersionTarget findSchemaVersionTarget(String taskName, String version, TaskDefinitionReader taskDefinitionReader);
+	SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinition taskDefinition);
+	SchemaVersionTarget findSchemaVersionTarget(String taskName, String version, TaskDefinition taskDefinition);
+
+	/**
+	 * Retrieve the {@link AppRegistration} for the registeredName.
+	 * @param registeredName Registered name for registration to find.
+	 * @return The application registration
+	 */
+	AppRegistration findTaskAppRegistration(String registeredName);
+	AppRegistration findTaskAppRegistration(String registeredName, String version);
+
+	/**
+	 * Return the {@link AggregateTaskExecution} for the {@link TaskExecution} and schema target name specified.
+	 * @param execution The task execution
+	 * @param schemaTarget The schemaTarget of the task execution
+	 * @param platformName The platform name of the task execution
+	 * @return The task execution
+	 */
+	AggregateTaskExecution from(TaskExecution execution, String schemaTarget, String platformName);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java
new file mode 100644
index 0000000000..6b8b81dd2a
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.impl.AggregateDataFlowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultAggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultAggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultTaskRepositoryContainer; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.util.Assert; + +/** + * Configuration for aggregate task related components. + * + * @author Corneil du Plessis + */ +@Configuration +@Import(SchemaServiceConfiguration.class) +public class AggregateTaskConfiguration { + private static final Logger logger = LoggerFactory.getLogger(AggregateTaskConfiguration.class); + + + @Bean + public DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao( + DataSource dataSource, + SchemaService schemaService + ) { + return new AggregateDataFlowTaskExecutionQueryDao(dataSource, schemaService); + } + + @Bean + public AggregateExecutionSupport aggregateExecutionSupport( + AppRegistryService registryService, + SchemaService schemaService + ) { + return new DefaultAggregateExecutionSupport(registryService, schemaService); + } + + @Bean + public TaskRepositoryContainer taskRepositoryContainer( + DataSource dataSource, + SchemaService schemaService + ) { + return new DefaultTaskRepositoryContainer(dataSource, schemaService); + } + + @Bean + public AggregateTaskExplorer aggregateTaskExplorer( + DataSource dataSource, + DataflowTaskExecutionQueryDao taskExecutionQueryDao, + SchemaService schemaService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader, + TaskDeploymentReader taskDeploymentReader + ) { + Assert.notNull(dataSource, "dataSource required"); + Assert.notNull(taskExecutionQueryDao, "taskExecutionQueryDao required"); + Assert.notNull(schemaService, "schemaService required"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport required"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader required"); + Assert.notNull(taskDeploymentReader, "taskDeploymentReader required"); + return new DefaultAggregateTaskExplorer(dataSource, + taskExecutionQueryDao, + schemaService, + aggregateExecutionSupport, + taskDefinitionReader, + taskDeploymentReader); + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.aggregate.task.AggregateTaskConfiguration"); + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java new file mode 100644 index 0000000000..a5390fc1a0 --- /dev/null +++ 
b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2023-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Set;
+
+import org.springframework.cloud.dataflow.schema.AggregateTaskExecution;
+import org.springframework.cloud.task.repository.TaskExecution;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+
+/**
+ * Provides for exploring tasks for multiple schema targets.
+ *
+ * @author Corneil du Plessis
+ */
+public interface AggregateTaskExplorer {
+	/**
+	 * Find a task execution given an execution id and schema target.
+	 *
+	 * @param executionId the task execution id
+	 * @param schemaTarget the schema target
+	 * @return the task execution
+	 */
+	AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget);
+
+	/**
+	 * Find a task execution given an external execution id and platform name.
+	 *
+	 * @param externalExecutionId the external execution id
+	 * @param platform the platform name
+	 * @return the task execution
+	 */
+	AggregateTaskExecution getTaskExecutionByExternalExecutionId(String externalExecutionId, String platform);
+
+	/**
+	 * Find the child task executions of the given parent task execution.
+	 */
+	List<AggregateTaskExecution> findChildTaskExecutions(long executionId, String schemaTarget);
+
+	/**
+	 * Find the child task executions of the given parent task executions.
+	 */
+	List<AggregateTaskExecution> findChildTaskExecutions(Collection<Long> parentIds, String schemaTarget);
+
+	/**
+	 * Retrieve a collection of taskExecutions that have the task name provided.
+	 *
+	 * @param taskName the name of the task
+	 * @param pageable the constraints for the search
+	 * @return the set of running executions for tasks with the specified name
+	 */
+	Page<AggregateTaskExecution> findRunningTaskExecutions(String taskName, Pageable pageable);
+
+	/**
+	 * Retrieve a list of available task names.
+	 *
+	 * @return the task names that have been executed
+	 */
+	List<String> getTaskNames();
+
+	/**
+	 * Get the number of executions for a task name.
+	 *
+	 * @param taskName the name of the task to be searched
+	 * @return the number of task executions that have the specified task name
+	 */
+	long getTaskExecutionCountByTaskName(String taskName);
+
+	/**
+	 * Retrieves current number of task executions.
+	 *
+	 * @return current number of task executions.
+	 */
+	long getTaskExecutionCount();
+
+	/**
+	 * Retrieves current number of running task executions.
+	 *
+	 * @return current number of running task executions.
+	 */
+	long getRunningTaskExecutionCount();
+
+	/**
+	 * Get a list of executions for a task by name and completion status.
+	 *
+	 * @param taskName the name of the task to be searched
+	 * @param onlyCompleted whether to include only completed tasks
+	 * @return list of task executions
+	 */
+	List<AggregateTaskExecution> findTaskExecutions(String taskName, boolean onlyCompleted);
+
+	/**
+	 * Get a list of executions for a task by name, completion status and end time.
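+	 * <p>
+	 * For example, a hypothetical cleanup pass over old runs (the {@code taskExplorer}
+	 * reference is an assumed, injected bean; names and values are illustrative only):
+	 * <pre>{@code
+	 * Date cutoff = Date.from(Instant.now().minus(30, ChronoUnit.DAYS));
+	 * List<AggregateTaskExecution> stale =
+	 *         taskExplorer.findTaskExecutionsBeforeEndTime("my-task", cutoff);
+	 * }</pre>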
+	 *
+	 * @param taskName the name of the task to be searched
+	 * @param endTime only executions that ended before this time are returned
+	 * @return list of task executions
+	 * @since 2.11.0
+	 */
+	List<AggregateTaskExecution> findTaskExecutionsBeforeEndTime(String taskName, Date endTime);
+
+	/**
+	 * Get a collection/page of executions.
+	 *
+	 * @param taskName the name of the task to be searched
+	 * @param pageable the constraints for the search
+	 * @return list of task executions
+	 */
+	Page<AggregateTaskExecution> findTaskExecutionsByName(String taskName, Pageable pageable);
+
+	/**
+	 * Retrieves all the task executions within the pageable constraints sorted by start
+	 * date descending, taskExecution id descending.
+	 *
+	 * @param pageable the constraints for the search
+	 * @return page containing the results from the search
+	 */
+	Page<AggregateTaskExecution> findAll(Pageable pageable);
+
+	/**
+	 * Retrieves all the task executions within the pageable constraints sorted by start
+	 * date descending, taskExecution id descending.
+	 *
+	 * @param pageable the constraints for the search
+	 * @param thinResults whether to return thin results, i.e. without the task arguments populated
+	 * @return page containing the results from the search
+	 */
+	Page<AggregateTaskExecution> findAll(Pageable pageable, boolean thinResults);
+	/**
+	 * Returns the id of the TaskExecution that the requested Spring Batch job execution
+	 * was executed within the context of. Returns null if none were found.
+	 *
+	 * @param jobExecutionId the id of the JobExecution
+	 * @param schemaTarget the schema target
+	 * @return the id of the {@link TaskExecution}
+	 */
+	Long getTaskExecutionIdByJobExecutionId(long jobExecutionId, String schemaTarget);
+
+	/**
+	 * Returns a Set of JobExecution ids for the jobs that were executed within the scope
+	 * of the requested task.
+	 *
+	 * @param taskExecutionId id of the {@link TaskExecution}
+	 * @param schemaTarget the schema target
+	 * @return a Set of the ids of the job executions executed within the task.
+	 */
+	Set<Long> getJobExecutionIdsByTaskExecutionId(long taskExecutionId, String schemaTarget);
+
+	/**
+	 * Returns a {@link List} of the latest {@link TaskExecution} for one or more task
+	 * names.
+	 * <p>
+	 * Latest is defined by the most recent start time. A {@link TaskExecution} does not
+	 * have to be finished (the results may include pending {@link TaskExecution}s).
+	 * <p>
+	 * It is theoretically possible for more than one {@link TaskExecution} with the same
+	 * name to share the exact same start time. In that case the {@link TaskExecution}
+	 * with the highest task execution id is returned.
+	 * <p>
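+	 * A hypothetical lookup (the {@code taskExplorer} reference is an assumed, injected
+	 * bean, not part of this interface):
+	 * <pre>{@code
+	 * List<AggregateTaskExecution> latest =
+	 *         taskExplorer.getLatestTaskExecutionsByTaskNames("task-a", "task-b");
+	 * }</pre>
+	 * <p>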
+	 * This method will not consider end times in its calculations. Thus, when a task
+	 * execution {@code A} starts after task execution {@code B} but finishes before
+	 * {@code B} does, task execution {@code A} is still the one returned.
+	 *
+	 * @param taskNames At least 1 task name must be provided
+	 * @return List of TaskExecutions. May be empty but never null.
+	 */
+	List<AggregateTaskExecution> getLatestTaskExecutionsByTaskNames(String... taskNames);
+
+	/**
+	 * Returns the latest task execution for a given task name. Will ultimately apply the
+	 * same algorithm underneath as {@link #getLatestTaskExecutionsByTaskNames(String...)}
+	 * but will only return a single result.
+	 *
+	 * @param taskName Must not be null or empty
+	 * @return The latest Task Execution or null
+	 * @see #getLatestTaskExecutionsByTaskNames(String...)
+	 */
+	AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java
new file mode 100644
index 0000000000..c33e14fb66
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2017-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.schema.AggregateTaskExecution;
+import org.springframework.cloud.task.repository.TaskExecution;
+import org.springframework.cloud.task.repository.dao.TaskExecutionDao;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+import org.springframework.lang.NonNull;
+
+/**
+ * Repository to access {@link TaskExecution}s. Mirrors the {@link TaskExecutionDao}
+ * but contains Spring Cloud Data Flow specific operations. This functionality might
+ * be migrated to Spring Cloud Task itself.
+ *
+ * @author Corneil du Plessis
+ * @since 2.11.0
+ */
+public interface DataflowTaskExecutionQueryDao {
+	/**
+	 * Retrieves a task execution from the task repository.
+	 *
+	 * @param executionId the id associated with the task execution.
+	 * @param schemaTarget the schema target.
+	 * @return a fully qualified TaskExecution instance.
+	 */
+	AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget);
+
+	/**
+	 * Retrieves a list of task executions where the provided execution id and schemaTarget
+	 * represent the parent of the task executions.
+	 *
+	 * @param executionId parent task execution id
+	 * @param schemaTarget parent task schema target
+	 * @return the task executions
+	 */
+	List<AggregateTaskExecution> findChildTaskExecutions(long executionId, String schemaTarget);
+
+	/**
+	 * Retrieves a list of task executions where the provided execution ids and schemaTarget
+	 * represent the parents of the task executions.
+	 *
+	 * @param parentIds parent task execution ids
+	 * @param schemaTarget parent task schema target
+	 * @return the task executions
+	 */
+	List<AggregateTaskExecution> findChildTaskExecutions(Collection<Long> parentIds, String schemaTarget);
+
+	/**
+	 * Find task executions by task name and completion status.
+	 *
+	 * @param taskName the name of the task to search for in the repository.
+	 * @param completed whether to include only completed task executions.
+	 * @return list of task executions
+	 */
+	List<AggregateTaskExecution> findTaskExecutions(String taskName, boolean completed);
+
+	/**
+	 * Find task executions by task name whose end date is before the specified date.
+	 *
+	 * @param taskName the name of the task to search for in the repository.
+	 * @param endTime only executions that ended before this time are returned.
+	 * @return list of task executions.
+	 */
+	List<AggregateTaskExecution> findTaskExecutionsBeforeEndTime(String taskName, @NonNull Date endTime);
+
+	/**
+	 * Retrieves current number of task executions for a taskName.
+	 *
+	 * @param taskName the name of the task
+	 * @return current number of task executions for the taskName.
+	 */
+	long getTaskExecutionCountByTaskName(String taskName);
+
+	/**
+	 * Retrieves current number of task executions for a taskName and with a non-null endTime before the specified date.
+	 *
+	 * @param taskName the name of the task
+	 * @param endTime the time before which the task ended
+	 * @return the number of completed task executions
+	 */
+	long getCompletedTaskExecutionCountByTaskNameAndBeforeDate(String taskName, @NonNull Date endTime);
+
+	/**
+	 * Retrieves current number of task executions for a taskName and with a non-null endTime.
+	 *
+	 * @param taskName the name of the task
+	 * @return the number of completed task executions
+	 */
+	long getCompletedTaskExecutionCountByTaskName(String taskName);
+
+	/**
+	 * Retrieves current number of task executions for a taskName and with an endTime of
+	 * null.
+	 *
+	 * @param taskName the name of the task to search for in the repository.
+	 * @return the number of running task executions
+	 */
+	long getRunningTaskExecutionCountByTaskName(String taskName);
+
+	/**
+	 * Retrieves current number of task executions with an endTime of null.
+	 *
+	 * @return current number of running task executions.
+	 */
+	long getRunningTaskExecutionCount();
+
+	/**
+	 * Retrieves current number of task executions.
+	 *
+	 * @return current number of task executions.
+	 */
+	long getTaskExecutionCount();
+
+	/**
+	 * Retrieves a set of task executions that are running for a taskName.
+	 *
+	 * @param taskName the name of the task to search for in the repository.
+	 * @param pageable the constraints for the search.
+	 * @return set of running task executions.
+	 */
+	Page<AggregateTaskExecution> findRunningTaskExecutions(String taskName, Pageable pageable);
+
+	/**
+	 * Retrieves a subset of task executions by task name, start location and size.
+	 *
+	 * @param taskName the name of the task to search for in the repository.
+	 * @param pageable the constraints for the search.
+	 * @return a list that contains task executions from the query bound by the start
+	 * position and count specified by the user.
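+	 * <p>
+	 * For example, a hypothetical page request (the {@code dao} reference is an assumed
+	 * collaborator; names are illustrative only):
+	 * <pre>{@code
+	 * Page<AggregateTaskExecution> page =
+	 *         dao.findTaskExecutionsByName("my-task", PageRequest.of(0, 20));
+	 * }</pre>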
+	 */
+	Page<AggregateTaskExecution> findTaskExecutionsByName(String taskName, Pageable pageable);
+
+	/**
+	 * Retrieves a sorted list of distinct task names for the task executions.
+	 *
+	 * @return a list of distinct task names from the task repository.
+	 */
+	List<String> getTaskNames();
+
+	/**
+	 * Retrieves all the task executions within the pageable constraints.
+	 *
+	 * @param pageable the constraints for the search
+	 * @return page containing the results from the search
+	 */
+
+	Page<AggregateTaskExecution> findAll(Pageable pageable);
+
+	/**
+	 * Retrieves all the task executions within the pageable constraints.
+	 * @param pageable the constraints for the search
+	 * @param thinResults whether to return thin results, i.e. without the task arguments populated
+	 * @return page containing the results from the search
+	 */
+
+	Page<AggregateTaskExecution> findAll(Pageable pageable, boolean thinResults);
+
+	/**
+	 * Returns a {@link List} of the latest {@link TaskExecution} for one or more task
+	 * names.
+	 * <p>
+	 * Latest is defined by the most recent start time. A {@link TaskExecution} does not
+	 * have to be finished (the results may include pending {@link TaskExecution}s).
+	 * <p>
+	 * It is theoretically possible for more than one {@link TaskExecution} with the same
+	 * name to share the exact same start time. In that case the {@link TaskExecution}
+	 * with the highest task execution id is returned.
+	 * <p>
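+	 * A hypothetical one-liner for the common single-task case, using the single-result
+	 * variant declared below (the {@code dao} reference is an assumed collaborator):
+	 * <pre>{@code
+	 * AggregateTaskExecution latest = dao.getLatestTaskExecutionForTaskName("my-task");
+	 * }</pre>
+	 * <p>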
+	 * This method will not consider end times in its calculations. Thus, when a task
+	 * execution {@code A} starts after task execution {@code B} but finishes before
+	 * {@code B} does, task execution {@code A} is still the one returned.
+	 *
+	 * @param taskNames At least 1 task name must be provided
+	 * @return List of TaskExecutions. May be empty but never null.
+	 */
+	List<AggregateTaskExecution> getLatestTaskExecutionsByTaskNames(String... taskNames);
+
+	/**
+	 * Returns the latest task execution for a given task name. Will ultimately apply the
+	 * same algorithm underneath as {@link #getLatestTaskExecutionsByTaskNames(String...)}
+	 * but will only return a single result.
+	 *
+	 * @param taskName Must not be null or empty
+	 * @return The latest Task Execution or null
+	 * @see #getLatestTaskExecutionsByTaskNames(String...)
+	 */
+	AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName);
+
+	/**
+	 * Retrieves a task execution by its external execution id and task name.
+	 *
+	 * @param executionId the external execution id of the task execution
+	 * @param taskName the name of the task
+	 * @return the task execution
+	 */
+	AggregateTaskExecution geTaskExecutionByExecutionId(String executionId, String taskName);
+
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java
new file mode 100644
index 0000000000..a88434e8b4
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import org.springframework.cloud.dataflow.core.TaskDefinition;
+
+public interface TaskDefinitionReader {
+	TaskDefinition findTaskDefinition(String taskName);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java
new file mode 100644
index 0000000000..768ee84069
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java
@@ -0,0 +1,11 @@
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import java.util.List;
+
+import org.springframework.cloud.dataflow.core.TaskDeployment;
+
+public interface TaskDeploymentReader {
+	TaskDeployment getDeployment(String externalTaskId);
+	TaskDeployment getDeployment(String externalTaskId, String platform);
+	TaskDeployment findByDefinitionName(String definitionName);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java
new file mode 100644
index 0000000000..77dae057a2
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import org.springframework.cloud.task.repository.TaskRepository;
+
+public interface TaskRepositoryContainer {
+	TaskRepository get(String schemaTarget);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java
b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java new file mode 100644 index 0000000000..e494b87ee7 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java @@ -0,0 +1,619 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.item.database.Order; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.database.PagingQueryProvider; +import org.springframework.cloud.task.repository.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.lang.NonNull; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * Provide aggregate data for Boot 3 and Boot <=2 TaskExecutions. + * + * @author Corneil du Plessis + */ + +public class AggregateDataFlowTaskExecutionQueryDao implements DataflowTaskExecutionQueryDao { + private final static Logger logger = LoggerFactory.getLogger(AggregateDataFlowTaskExecutionQueryDao.class); + + /** + * SELECT clause for task execution. + */ + public static final String SELECT_CLAUSE = "TASK_EXECUTION_ID, " + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE, " + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, " + + "EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, SCHEMA_TARGET "; + + /** + * FROM clause for task execution. 
+ */ + public static final String FROM_CLAUSE = "AGGREGATE_TASK_EXECUTION"; + + /** + * WHERE clause for running task. + */ + public static final String RUNNING_TASK_WHERE_CLAUSE = "where TASK_NAME = :taskName AND END_TIME IS NULL "; + + /** + * WHERE clause for task name. + */ + public static final String TASK_NAME_WHERE_CLAUSE = "where TASK_NAME = :taskName "; + + private static final String FIND_TASK_ARGUMENTS = "SELECT TASK_EXECUTION_ID, " + + "TASK_PARAM from AGGREGATE_TASK_EXECUTION_PARAMS where TASK_EXECUTION_ID = :taskExecutionId and SCHEMA_TARGET = :schemaTarget"; + + private static final String FIND_TASKS_ARGUMENTS = "SELECT TASK_EXECUTION_ID, " + + "TASK_PARAM from AGGREGATE_TASK_EXECUTION_PARAMS where TASK_EXECUTION_ID IN (:taskExecutionIds) and SCHEMA_TARGET = :schemaTarget"; + + private static final String GET_EXECUTIONS = "SELECT " + SELECT_CLAUSE + + " from AGGREGATE_TASK_EXECUTION"; + + private static final String GET_EXECUTION_BY_ID = GET_EXECUTIONS + + " where TASK_EXECUTION_ID = :taskExecutionId and SCHEMA_TARGET = :schemaTarget"; + + private final static String GET_CHILD_EXECUTION_BY_ID = GET_EXECUTIONS + + " where PARENT_EXECUTION_ID = :taskExecutionId" + + " and (SELECT COUNT(*) FROM AGGREGATE_TASK_EXECUTION_PARAMS P " + + " WHERE P.TASK_EXECUTION_ID=TASK_EXECUTION_ID " + + " AND P.SCHEMA_TARGET=SCHEMA_TARGET" + + " AND P.TASK_PARAM = :schemaTarget) > 0"; + + private final static String GET_CHILD_EXECUTION_BY_IDS = GET_EXECUTIONS + + " where PARENT_EXECUTION_ID IN (:taskExecutionIds)" + + " and (SELECT COUNT(*) FROM AGGREGATE_TASK_EXECUTION_PARAMS P " + + " WHERE P.TASK_EXECUTION_ID=TASK_EXECUTION_ID " + + " AND P.SCHEMA_TARGET=SCHEMA_TARGET" + + " AND P.TASK_PARAM = :schemaTarget) > 0"; + + private static final String GET_EXECUTION_BY_EXTERNAL_EXECUTION_ID = GET_EXECUTIONS + + " where EXTERNAL_EXECUTION_ID = :externalExecutionId and TASK_NAME = :taskName"; + + private static final String GET_EXECUTIONS_BY_NAME_COMPLETED = GET_EXECUTIONS + + " where TASK_NAME = :taskName AND END_TIME IS NOT NULL"; + + private static final String GET_EXECUTIONS_BY_NAME = GET_EXECUTIONS + + " where TASK_NAME = :taskName"; + + private static final String GET_EXECUTIONS_COMPLETED = GET_EXECUTIONS + + " where END_TIME IS NOT NULL"; + + private static final String GET_EXECUTION_BY_NAME_COMPLETED_BEFORE_END_TIME = GET_EXECUTIONS + + " where TASK_NAME = :taskName AND END_TIME IS NOT NULL AND END_TIME < :endTime"; + + private static final String GET_EXECUTIONS_COMPLETED_BEFORE_END_TIME = GET_EXECUTIONS + + " where END_TIME IS NOT NULL AND END_TIME < :endTime"; + + private static final String TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION "; + + private static final String TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName"; + + private static final String TASK_EXECUTION_COUNT_BY_NAME_AND_BEFORE_END_TIME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND END_TIME < :endTime"; + + private static final String COMPLETED_TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION WHERE END_TIME IS NOT NULL"; + + private static final String COMPLETED_TASK_EXECUTION_COUNT_AND_BEFORE_END_TIME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION WHERE END_TIME IS NOT NULL AND END_TIME < :endTime"; + + private static final String COMPLETED_TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND 
END_TIME IS NOT NULL "; + + private static final String COMPLETED_TASK_EXECUTION_COUNT_BY_NAME_AND_BEFORE_END_TIME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND END_TIME IS NOT NULL AND END_TIME < :endTime "; + + + private static final String RUNNING_TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND END_TIME IS NULL "; + + private static final String RUNNING_TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where END_TIME IS NULL "; + + private static final String LAST_TASK_EXECUTIONS_BY_TASK_NAMES = "select TE2.* from (" + + "select MAX(TE.TASK_EXECUTION_ID) as TASK_EXECUTION_ID, TE.TASK_NAME, TE.START_TIME from (" + + "select TASK_NAME, MAX(START_TIME) as START_TIME" + + " FROM AGGREGATE_TASK_EXECUTION where TASK_NAME in (:taskNames)" + + " GROUP BY TASK_NAME) TE_MAX" + + " inner join AGGREGATE_TASK_EXECUTION TE ON TE.TASK_NAME = TE_MAX.TASK_NAME AND TE.START_TIME = TE_MAX.START_TIME" + + " group by TE.TASK_NAME, TE.START_TIME" + ") TE1" + + " inner join AGGREGATE_TASK_EXECUTION TE2 ON TE1.TASK_EXECUTION_ID = TE2.TASK_EXECUTION_ID AND TE1.SCHEMA_TARGET = TE2.SCHEMA_TARGET" + + " order by TE2.START_TIME DESC, TE2.TASK_EXECUTION_ID DESC"; + + private static final String FIND_TASK_NAMES = "SELECT distinct TASK_NAME from AGGREGATE_TASK_EXECUTION order by TASK_NAME"; + + private static final Set validSortColumns = new HashSet<>(10); + + static { + validSortColumns.add("TASK_EXECUTION_ID"); + validSortColumns.add("START_TIME"); + validSortColumns.add("END_TIME"); + validSortColumns.add("TASK_NAME"); + validSortColumns.add("EXIT_CODE"); + validSortColumns.add("EXIT_MESSAGE"); + validSortColumns.add("ERROR_MESSAGE"); + validSortColumns.add("LAST_UPDATED"); + validSortColumns.add("EXTERNAL_EXECUTION_ID"); + validSortColumns.add("PARENT_EXECUTION_ID"); + validSortColumns.add("SCHEMA_TARGET"); + } + + private final NamedParameterJdbcTemplate jdbcTemplate; + + private final DataSource dataSource; + + private final LinkedHashMap orderMap; + + private final SchemaService schemaService; + + /** + * Initializes the AggregateDataFlowJobExecutionDao. + * + * @param dataSource used by the dao to execute queries and update the tables. 
+ * @param schemaService used to find schema target information + */ + public AggregateDataFlowTaskExecutionQueryDao(DataSource dataSource, SchemaService schemaService) { + Assert.notNull(dataSource, "The dataSource must not be null."); + this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource); + this.dataSource = dataSource; + this.schemaService = schemaService; + this.orderMap = new LinkedHashMap<>(); + this.orderMap.put("START_TIME", Order.DESCENDING); + this.orderMap.put("TASK_EXECUTION_ID", Order.DESCENDING); + } + + @Override + public AggregateTaskExecution geTaskExecutionByExecutionId(String externalExecutionId, String taskName) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("externalExecutionId", externalExecutionId) + .addValue("taskName", taskName); + + try { + return this.jdbcTemplate.queryForObject( + GET_EXECUTION_BY_EXTERNAL_EXECUTION_ID, + queryParameters, + new CompositeTaskExecutionRowMapper(true) + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionId", executionId, Types.BIGINT) + .addValue("schemaTarget", schemaTarget); + + try { + return this.jdbcTemplate.queryForObject( + GET_EXECUTION_BY_ID, + queryParameters, + new CompositeTaskExecutionRowMapper(true) + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public List<AggregateTaskExecution> findChildTaskExecutions(long executionId, String schemaTarget) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionId", executionId, Types.BIGINT) + .addValue("schemaTarget", "--spring.cloud.task.parent-schema-target=" + schemaTarget); + + try { + return this.jdbcTemplate.query( + GET_CHILD_EXECUTION_BY_ID, + queryParameters, + new CompositeTaskExecutionRowMapper(true) + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public List<AggregateTaskExecution> findChildTaskExecutions(Collection<Long> parentIds, String schemaTarget) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionIds", parentIds) + .addValue("schemaTarget", "--spring.cloud.task.parent-schema-target=" + schemaTarget); + + try { + List<AggregateTaskExecution> result = this.jdbcTemplate.query( + GET_CHILD_EXECUTION_BY_IDS, + queryParameters, + new CompositeTaskExecutionRowMapper(false) + ); + populateArguments(schemaTarget, result); + return result; + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + private void populateArguments(String schemaTarget, List<AggregateTaskExecution> result) { + List<Long> ids = result.stream().map(AggregateTaskExecution::getExecutionId).collect(Collectors.toList()); + Map<Long, List<String>> paramMap = getTaskArgumentsForTasks(ids, schemaTarget); + result.forEach(aggregateTaskExecution -> { + List<String> params = paramMap.get(aggregateTaskExecution.getExecutionId()); + if(params != null) { + aggregateTaskExecution.setArguments(params); + } + }); + } + + @Override + public List<AggregateTaskExecution> findTaskExecutions(String taskName, boolean completed) { + List<AggregateTaskExecution> result; + if (StringUtils.hasLength(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName); + String query = completed ?
GET_EXECUTIONS_BY_NAME_COMPLETED : GET_EXECUTIONS_BY_NAME; + result = this.jdbcTemplate.query(query, queryParameters, new CompositeTaskExecutionRowMapper(false)); + } else { + result = this.jdbcTemplate.query(completed ? GET_EXECUTIONS_COMPLETED : GET_EXECUTIONS, Collections.emptyMap(), new CompositeTaskExecutionRowMapper(false)); + } + result.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)) + .forEach(this::populateArguments); + return result; + } + + @Override + public List findTaskExecutionsBeforeEndTime(String taskName, @NonNull Date endTime) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName) + .addValue("endTime", endTime); + String query; + query = taskName.isEmpty() ? GET_EXECUTIONS_COMPLETED_BEFORE_END_TIME : GET_EXECUTION_BY_NAME_COMPLETED_BEFORE_END_TIME; + List result = this.jdbcTemplate.query(query, queryParameters, new CompositeTaskExecutionRowMapper(false)); + result.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)) + .forEach(this::populateArguments); + return result; + } + + @Override + public long getTaskExecutionCountByTaskName(String taskName) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR); + + try { + count = this.jdbcTemplate.queryForObject(TASK_EXECUTION_COUNT_BY_NAME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + count = this.jdbcTemplate.queryForObject(TASK_EXECUTION_COUNT, Collections.emptyMap(), Long.class); + } + return count != null ? count : 0L; + } + + @Override + public long getCompletedTaskExecutionCountByTaskName(String taskName) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR); + + try { + count = this.jdbcTemplate.queryForObject(COMPLETED_TASK_EXECUTION_COUNT_BY_NAME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + count = this.jdbcTemplate.queryForObject(COMPLETED_TASK_EXECUTION_COUNT, Collections.emptyMap(), Long.class); + } + return count != null ? count : 0L; + } + + @Override + public long getCompletedTaskExecutionCountByTaskNameAndBeforeDate(String taskName, @NonNull Date endTime) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR) + .addValue("endTime", endTime, Types.DATE); + + try { + count = this.jdbcTemplate.queryForObject(COMPLETED_TASK_EXECUTION_COUNT_BY_NAME_AND_BEFORE_END_TIME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("endTime", endTime, Types.DATE); + count = this.jdbcTemplate.queryForObject(COMPLETED_TASK_EXECUTION_COUNT_AND_BEFORE_END_TIME, queryParameters, Long.class); + } + return count != null ? 
count : 0L; + } + + @Override + public long getRunningTaskExecutionCountByTaskName(String taskName) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR); + + try { + logger.debug("getRunningTaskExecutionCountByTaskName:{}:sql={}", taskName, RUNNING_TASK_EXECUTION_COUNT_BY_NAME); + count = this.jdbcTemplate.queryForObject(RUNNING_TASK_EXECUTION_COUNT_BY_NAME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + logger.debug("getRunningTaskExecutionCountByTaskName:{}:sql={}", taskName, RUNNING_TASK_EXECUTION_COUNT); + count = this.jdbcTemplate.queryForObject(RUNNING_TASK_EXECUTION_COUNT, Collections.emptyMap(), Long.class); + + } + return count != null ? count : 0L; + } + + @Override + public long getRunningTaskExecutionCount() { + try { + final SqlParameterSource queryParameters = new MapSqlParameterSource(); + Long result = this.jdbcTemplate.queryForObject(RUNNING_TASK_EXECUTION_COUNT, queryParameters, Long.class); + return result != null ? result : 0L; + } catch (EmptyResultDataAccessException e) { + return 0; + } + } + + @Override + public List getLatestTaskExecutionsByTaskNames(String... taskNames) { + Assert.notEmpty(taskNames, "At least 1 task name must be provided."); + final List taskNamesAsList = new ArrayList<>(); + + for (String taskName : taskNames) { + if (StringUtils.hasText(taskName)) { + taskNamesAsList.add(taskName); + } + } + + Assert.isTrue(taskNamesAsList.size() == taskNames.length, String.format( + "Task names must not contain any empty elements but %s of %s were empty or null.", + taskNames.length - taskNamesAsList.size(), taskNames.length)); + + try { + final Map> paramMap = Collections + .singletonMap("taskNames", taskNamesAsList); + List result = this.jdbcTemplate.query(LAST_TASK_EXECUTIONS_BY_TASK_NAMES, paramMap, new CompositeTaskExecutionRowMapper(false)); + result.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)) + .forEach(this::populateArguments); + return result; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } + } + + @Override + public AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName) { + Assert.hasText(taskName, "The task name must not be empty."); + final List taskExecutions = this + .getLatestTaskExecutionsByTaskNames(taskName); + if (taskExecutions.isEmpty()) { + return null; + } else if (taskExecutions.size() == 1) { + return taskExecutions.get(0); + } else { + throw new IllegalStateException( + "Only expected a single TaskExecution but received " + + taskExecutions.size()); + } + } + + @Override + public long getTaskExecutionCount() { + try { + Long count = this.jdbcTemplate.queryForObject(TASK_EXECUTION_COUNT, new MapSqlParameterSource(), Long.class); + return count != null ? 
count : 0; + } catch (EmptyResultDataAccessException e) { + return 0; + } + } + + @Override + public Page findRunningTaskExecutions(String taskName, Pageable pageable) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, + RUNNING_TASK_WHERE_CLAUSE, + new MapSqlParameterSource("taskName", taskName), + getRunningTaskExecutionCountByTaskName(taskName), false); + } + + @Override + public Page findTaskExecutionsByName(String taskName, Pageable pageable) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, + TASK_NAME_WHERE_CLAUSE, new MapSqlParameterSource("taskName", taskName), + getTaskExecutionCountByTaskName(taskName), false); + } + + @Override + public List getTaskNames() { + return this.jdbcTemplate.queryForList(FIND_TASK_NAMES, + new MapSqlParameterSource(), String.class); + } + + @Override + public Page findAll(Pageable pageable) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, null, + new MapSqlParameterSource(), getTaskExecutionCount(), false); + } + + @Override + public Page findAll(Pageable pageable, boolean thinResults) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, null, + new MapSqlParameterSource(), getTaskExecutionCount(), thinResults); + } + + private Page queryForPageableResults( + Pageable pageable, + String selectClause, + String fromClause, + String whereClause, + MapSqlParameterSource queryParameters, + long totalCount, + boolean thinResults + ) { + SqlPagingQueryProviderFactoryBean factoryBean = new SqlPagingQueryProviderFactoryBean(); + factoryBean.setSelectClause(selectClause); + factoryBean.setFromClause(fromClause); + if (StringUtils.hasText(whereClause)) { + factoryBean.setWhereClause(whereClause); + } + final Sort sort = pageable.getSort(); + final LinkedHashMap sortOrderMap = new LinkedHashMap<>(); + + if (sort != null) { + for (Sort.Order sortOrder : sort) { + if (validSortColumns.contains(sortOrder.getProperty().toUpperCase())) { + sortOrderMap.put(sortOrder.getProperty(), + sortOrder.isAscending() ? 
Order.ASCENDING : Order.DESCENDING); + } else { + throw new IllegalArgumentException( + String.format("Invalid sort option selected: %s", sortOrder.getProperty())); + } + } + } + + if (!CollectionUtils.isEmpty(sortOrderMap)) { + factoryBean.setSortKeys(sortOrderMap); + } else { + factoryBean.setSortKeys(this.orderMap); + } + + factoryBean.setDataSource(this.dataSource); + PagingQueryProvider pagingQueryProvider; + try { + pagingQueryProvider = factoryBean.getObject(); + pagingQueryProvider.init(this.dataSource); + } catch (Exception e) { + throw new IllegalStateException(e); + } + String query = pagingQueryProvider.getPageQuery(pageable); + List resultList = this.jdbcTemplate.query(query, + queryParameters, new CompositeTaskExecutionRowMapper(!thinResults)); + resultList.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)) + .forEach(this::populateArguments); + return new PageImpl<>(resultList, pageable, totalCount); + } + + + private class CompositeTaskExecutionRowMapper implements RowMapper { + final boolean mapRow; + private CompositeTaskExecutionRowMapper(boolean mapRow) { + this.mapRow = mapRow; + } + + @Override + public AggregateTaskExecution mapRow(ResultSet rs, int rowNum) throws SQLException { + long id = rs.getLong("TASK_EXECUTION_ID"); + Long parentExecutionId = rs.getLong("PARENT_EXECUTION_ID"); + if (rs.wasNull()) { + parentExecutionId = null; + } + String schemaTarget = rs.getString("SCHEMA_TARGET"); + if (schemaTarget != null && schemaService.getTarget(schemaTarget) == null) { + logger.warn("Cannot find schemaTarget:{}", schemaTarget); + } + return new AggregateTaskExecution(id, + getNullableExitCode(rs), + rs.getString("TASK_NAME"), + rs.getTimestamp("START_TIME"), + rs.getTimestamp("END_TIME"), + rs.getString("EXIT_MESSAGE"), + mapRow ? getTaskArguments(id, schemaTarget) : Collections.emptyList(), + rs.getString("ERROR_MESSAGE"), + rs.getString("EXTERNAL_EXECUTION_ID"), + parentExecutionId, + null, + null, + schemaTarget + ); + } + + private Integer getNullableExitCode(ResultSet rs) throws SQLException { + int exitCode = rs.getInt("EXIT_CODE"); + return !rs.wasNull() ? 
exitCode : null; + } + } + + private List getTaskArguments(long taskExecutionId, String schemaTarget) { + final List params = new ArrayList<>(); + RowCallbackHandler handler = rs -> params.add(rs.getString(2)); + MapSqlParameterSource parameterSource = new MapSqlParameterSource("taskExecutionId", taskExecutionId) + .addValue("schemaTarget", schemaTarget); + this.jdbcTemplate.query( + FIND_TASK_ARGUMENTS, + parameterSource, + handler); + return params; + } + private Map> getTaskArgumentsForTasks(Collection taskExecutionIds, String schemaTarget) { + if(taskExecutionIds.isEmpty()) { + return Collections.emptyMap(); + } else { + final Map> result = new HashMap<>(); + RowCallbackHandler handler = rs -> result.computeIfAbsent(rs.getLong(1), a -> new ArrayList<>()) + .add(rs.getString(2)); + MapSqlParameterSource parameterSource = new MapSqlParameterSource("taskExecutionIds", taskExecutionIds) + .addValue("schemaTarget", schemaTarget); + this.jdbcTemplate.query(FIND_TASKS_ARGUMENTS, parameterSource, handler); + return result; + } + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java new file mode 100644 index 0000000000..342b09e4d4 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java @@ -0,0 +1,164 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import java.util.List; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.core.AppRegistration; +import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.util.StringUtils; + +/** + * Provides support for access to SchemaVersionTarget information and conversion of execution data to composite executions. 
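 + * <p>A minimal usage sketch; {@code registryService}, {@code schemaService}, {@code taskDefinitionReader}, {@code execution} and the {@code "default"} platform name are placeholders, not part of this change: <pre>{@code + * AggregateExecutionSupport support = new DefaultAggregateExecutionSupport(registryService, schemaService); + * // resolve the schema target registered for the task name, then convert a raw TaskExecution + * SchemaVersionTarget target = support.findSchemaVersionTarget("my-task", taskDefinitionReader); + * AggregateTaskExecution aggregate = support.from(execution, target.getName(), "default"); + * }</pre>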
+ * + * @author Corneil du Plessis + */ + +public class DefaultAggregateExecutionSupport implements AggregateExecutionSupport { + private static final Logger logger = LoggerFactory.getLogger(AggregateExecutionSupport.class); + + private final AppRegistryService registryService; + + private final SchemaService schemaService; + + public DefaultAggregateExecutionSupport( + AppRegistryService registryService, + SchemaService schemaService + ) { + this.registryService = registryService; + this.schemaService = schemaService; + } + + @Override + public AggregateTaskExecution from(TaskExecution execution, TaskDefinitionReader taskDefinitionReader, TaskDeploymentReader taskDeploymentReader) { + TaskDefinition taskDefinition = taskDefinitionReader.findTaskDefinition(execution.getTaskName()); + TaskDeployment deployment = null; + if (StringUtils.hasText(execution.getExternalExecutionId())) { + deployment = taskDeploymentReader.getDeployment(execution.getExternalExecutionId()); + } else { + if(taskDefinition == null) { + logger.warn("TaskDefinition not found for " + execution.getTaskName()); + } else { + deployment = taskDeploymentReader.findByDefinitionName(taskDefinition.getName()); + } + } + SchemaVersionTarget versionTarget = findSchemaVersionTarget(execution.getTaskName(), taskDefinition); + return from(execution, versionTarget.getName(), deployment != null ? deployment.getPlatformName() : null); + } + + @Override + public SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinitionReader taskDefinitionReader) { + logger.debug("findSchemaVersionTarget:{}", taskName); + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskName); + return findSchemaVersionTarget(taskName, definition); + } + + @Override + public SchemaVersionTarget findSchemaVersionTarget(String taskName, String version, TaskDefinitionReader taskDefinitionReader) { + logger.debug("findSchemaVersionTarget:{}:{}", taskName, version); + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskName); + return findSchemaVersionTarget(taskName, version, definition); + } + + @Override + public SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinition taskDefinition) { + return findSchemaVersionTarget(taskName, null, taskDefinition); + } + + @Override + public SchemaVersionTarget findSchemaVersionTarget(String taskName, String version, TaskDefinition taskDefinition) { + logger.debug("findSchemaVersionTarget:{}:{}", taskName, version); + String registeredName = taskDefinition != null ? 
taskDefinition.getRegisteredAppName() : taskName; + AppRegistration registration = findTaskAppRegistration(registeredName, version); + if (registration == null) { + if(StringUtils.hasLength(version)) { + logger.warn("Cannot find AppRegistration for {}:{}", taskName, version); + } else { + logger.warn("Cannot find AppRegistration for {}", taskName); + } + return SchemaVersionTarget.defaultTarget(); + } + final AppRegistration finalRegistration = registration; + List versionTargets = schemaService.getTargets().getSchemas() + .stream() + .filter(target -> target.getSchemaVersion().equals(finalRegistration.getBootVersion())) + .collect(Collectors.toList()); + if (versionTargets.isEmpty()) { + logger.warn("Cannot find a SchemaVersionTarget for {}", registration.getBootVersion()); + return SchemaVersionTarget.defaultTarget(); + } + if (versionTargets.size() > 1) { + throw new IllegalStateException("Multiple SchemaVersionTargets for " + registration.getBootVersion()); + } + SchemaVersionTarget schemaVersionTarget = versionTargets.get(0); + logger.debug("findSchemaVersionTarget:{}:{}:{}={}", taskName, registeredName, version, schemaVersionTarget); + return schemaVersionTarget; + } + + @Override + public AppRegistration findTaskAppRegistration(String registeredName) { + return findTaskAppRegistration(registeredName, null); + } + + @Override + public AppRegistration findTaskAppRegistration(String registeredAppName, String version) { + AppRegistration registration = StringUtils.hasLength(version) ? + registryService.find(registeredAppName, ApplicationType.task, version) : + registryService.find(registeredAppName, ApplicationType.task); + if (registration == null) { + registration = StringUtils.hasLength(version) ? + registryService.find(registeredAppName, ApplicationType.app, version) : + registryService.find(registeredAppName, ApplicationType.app); + } + logger.debug("findTaskAppRegistration:{}:{}={}", registeredAppName, version, registration); + return registration; + } + + @Override + public AggregateTaskExecution from(TaskExecution execution, String schemaTarget, String platformName) { + if (execution != null) { + return new AggregateTaskExecution( + execution.getExecutionId(), + execution.getExitCode(), + execution.getTaskName(), + execution.getStartTime(), + execution.getEndTime(), + execution.getExitMessage(), + execution.getArguments(), + execution.getErrorMessage(), + execution.getExternalExecutionId(), + execution.getParentExecutionId(), + platformName, + null, + schemaTarget); + } + return null; + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java new file mode 100644 index 0000000000..a933e1c498 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java @@ -0,0 +1,306 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implements {@link AggregateTaskExplorer}. This class is responsible for retrieving task execution data across all schema targets.
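 + * <p>Illustrative usage; the {@code explorer} instance and task name are placeholders, not part of this change: <pre>{@code + * // look-ups fan out to the TaskExplorer registered for the task's schema target + * AggregateTaskExecution latest = explorer.getLatestTaskExecutionForTaskName("my-task"); + * Page<AggregateTaskExecution> page = explorer.findTaskExecutionsByName("my-task", PageRequest.of(0, 20)); + * }</pre>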
+ * + * @author Corneil du Plessis + */ +public class DefaultAggregateTaskExplorer implements AggregateTaskExplorer { + private final static Logger logger = LoggerFactory.getLogger(DefaultAggregateTaskExplorer.class); + + private final Map taskExplorers; + + private final AggregateExecutionSupport aggregateExecutionSupport; + + private final DataflowTaskExecutionQueryDao taskExecutionQueryDao; + + private final TaskDefinitionReader taskDefinitionReader; + + private final TaskDeploymentReader taskDeploymentReader; + + public DefaultAggregateTaskExplorer( + DataSource dataSource, + DataflowTaskExecutionQueryDao taskExecutionQueryDao, + SchemaService schemaService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader, + TaskDeploymentReader taskDeploymentReader + ) { + this.taskExecutionQueryDao = taskExecutionQueryDao; + this.aggregateExecutionSupport = aggregateExecutionSupport; + this.taskDefinitionReader = taskDefinitionReader; + this.taskDeploymentReader = taskDeploymentReader; + Map result = new HashMap<>(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + TaskExplorer explorer = new SimpleTaskExplorer(new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, target.getTaskPrefix())); + result.put(target.getName(), explorer); + } + taskExplorers = Collections.unmodifiableMap(result); + } + + @Override + public AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskExplorer taskExplorer = taskExplorers.get(schemaTarget); + Assert.notNull(taskExplorer, "Expected taskExplorer for " + schemaTarget); + TaskExecution taskExecution = taskExplorer.getTaskExecution(executionId); + TaskDeployment deployment = null; + if (taskExecution != null) { + if (StringUtils.hasText(taskExecution.getExternalExecutionId())) { + deployment = taskDeploymentReader.getDeployment(taskExecution.getExternalExecutionId()); + } else { + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskExecution.getTaskName()); + if (definition == null) { + logger.warn("Cannot find definition for " + taskExecution.getTaskName()); + } else { + deployment = taskDeploymentReader.findByDefinitionName(definition.getName()); + } + } + } + return aggregateExecutionSupport.from(taskExecution, schemaTarget, deployment != null ? 
deployment.getPlatformName() : null); + } + + @Override + public AggregateTaskExecution getTaskExecutionByExternalExecutionId(String externalExecutionId, String platform) { + TaskDeployment deployment = taskDeploymentReader.getDeployment(externalExecutionId, platform); + if (deployment != null) { + return this.taskExecutionQueryDao.geTaskExecutionByExecutionId(externalExecutionId, deployment.getTaskDefinitionName()); + } + return null; + } + + @Override + public List findChildTaskExecutions(long executionId, String schemaTarget) { + return this.taskExecutionQueryDao.findChildTaskExecutions(executionId, schemaTarget); + } + + @Override + public List findChildTaskExecutions(Collection parentIds, String schemaTarget) { + return this.taskExecutionQueryDao.findChildTaskExecutions(parentIds, schemaTarget); + } + + @Override + public Page findRunningTaskExecutions(String taskName, Pageable pageable) { + SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName); + TaskExplorer taskExplorer = taskExplorers.get(target.getName()); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName()); + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskName); + if (definition == null) { + logger.warn("Cannot find TaskDefinition for " + taskName); + } + TaskDeployment deployment = definition != null ? taskDeploymentReader.findByDefinitionName(definition.getName()) : null; + final String platformName = deployment != null ? deployment.getPlatformName() : null; + Page executions = taskExplorer.findRunningTaskExecutions(taskName, pageable); + List taskExecutions = executions.getContent() + .stream() + .map(execution -> aggregateExecutionSupport.from(execution, target.getName(), platformName)) + .collect(Collectors.toList()); + return new PageImpl<>(taskExecutions, executions.getPageable(), executions.getTotalElements()); + } + + @Override + public List getTaskNames() { + List result = new ArrayList<>(); + for (TaskExplorer explorer : taskExplorers.values()) { + result.addAll(explorer.getTaskNames()); + } + return result; + } + + @Override + public long getTaskExecutionCountByTaskName(String taskName) { + long result = 0; + for (TaskExplorer explorer : taskExplorers.values()) { + result += explorer.getTaskExecutionCountByTaskName(taskName); + } + return result; + } + + @Override + public long getTaskExecutionCount() { + long result = 0; + for (TaskExplorer explorer : taskExplorers.values()) { + result += explorer.getTaskExecutionCount(); + } + return result; + } + + @Override + public long getRunningTaskExecutionCount() { + long result = 0; + for (TaskExplorer explorer : taskExplorers.values()) { + result += explorer.getRunningTaskExecutionCount(); + } + return result; + } + + @Override + public List findTaskExecutions(String taskName, boolean completed) { + return this.taskExecutionQueryDao.findTaskExecutions(taskName, completed); + } + + @Override + public List findTaskExecutionsBeforeEndTime(String taskName, Date endTime) { + return this.taskExecutionQueryDao.findTaskExecutionsBeforeEndTime(taskName, endTime); + } + + @Override + public Page findTaskExecutionsByName(String taskName, Pageable pageable) { + + String platformName = getPlatformName(taskName); + SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + Assert.notNull(target, "Expected to find SchemaVersionTarget for " + 
taskName); + TaskExplorer taskExplorer = taskExplorers.get(target.getName()); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName()); + Page executions = taskExplorer.findTaskExecutionsByName(taskName, pageable); + List taskExecutions = executions.getContent() + .stream() + .map(execution -> aggregateExecutionSupport.from(execution, target.getName(), platformName)) + .collect(Collectors.toList()); + return new PageImpl<>(taskExecutions, executions.getPageable(), executions.getTotalElements()); + } + + private String getPlatformName(String taskName) { + String platformName = null; + TaskDefinition taskDefinition = taskDefinitionReader.findTaskDefinition(taskName); + if (taskDefinition != null) { + TaskDeployment taskDeployment = taskDeploymentReader.findByDefinitionName(taskDefinition.getName()); + platformName = taskDeployment != null ? taskDeployment.getPlatformName() : null; + } else { + logger.warn("TaskDefinition not found for " + taskName); + } + return platformName; + } + + @Override + public Page findAll(Pageable pageable) { + return taskExecutionQueryDao.findAll(pageable); + } + + @Override + public Page findAll(Pageable pageable, boolean thinResults) { + return taskExecutionQueryDao.findAll(pageable, thinResults); + } + + @Override + public Long getTaskExecutionIdByJobExecutionId(long jobExecutionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskExplorer taskExplorer = taskExplorers.get(schemaTarget); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + schemaTarget); + return taskExplorer.getTaskExecutionIdByJobExecutionId(jobExecutionId); + } + + @Override + public Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskExplorer taskExplorer = taskExplorers.get(schemaTarget); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + schemaTarget); + return taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecutionId); + } + private static void add(Map> setMap, String key, String value) { + Set set = setMap.computeIfAbsent(key, (v) -> new HashSet<>()); + set.add(value); + } + @Override + public List getLatestTaskExecutionsByTaskNames(String... 
taskNames) { + List result = new ArrayList<>(); + Map> targetToTaskNames = new HashMap<>(); + Map taskNamePlatform = new HashMap<>(); + for (String taskName : taskNames) { + SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + String platformName = getPlatformName(taskName); + Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName); + add(targetToTaskNames, target.getName(), taskName); + if(platformName != null) { + taskNamePlatform.put(taskName, platformName); + } + } + for(String target : targetToTaskNames.keySet()) { + Set tasks = targetToTaskNames.get(target); + if(!tasks.isEmpty()) { + TaskExplorer taskExplorer = taskExplorers.get(target); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target); + List taskExecutions = taskExplorer + .getLatestTaskExecutionsByTaskNames(tasks.toArray(new String[0])) + .stream() + .map(execution -> aggregateExecutionSupport.from(execution, target, taskNamePlatform.get(execution.getTaskName()))) + .collect(Collectors.toList()); + result.addAll(taskExecutions); + } + } + return result; + } + + @Override + public AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName) { + + SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName); + TaskExplorer taskExplorer = taskExplorers.get(target.getName()); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName()); + return aggregateExecutionSupport.from(taskExplorer.getLatestTaskExecutionForTaskName(taskName), target.getName(), getPlatformName(taskName)); + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.aggregate.task.impl.DefaultAggregateTaskExplorer"); + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java new file mode 100644 index 0000000000..3db52d91cc --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java @@ -0,0 +1,72 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.cloud.task.repository.support.SimpleTaskRepository; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * This class manages a collection of TaskRepositories for all schemas. + * In the future there will be a datasource container for all named datasources. + * + * @author Corneil du Plessis + */ +public class DefaultTaskRepositoryContainer implements TaskRepositoryContainer { + private final static Logger logger = LoggerFactory.getLogger(DefaultTaskRepositoryContainer.class); + + private final Map<String, TaskRepository> taskRepositories = new HashMap<>(); + + public DefaultTaskRepositoryContainer(DataSource dataSource, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + MultiSchemaTaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean = new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, target.getTaskPrefix()); + add(target.getName(), new SimpleTaskRepository(taskExecutionDaoFactoryBean)); + } + } + + private void add(String schemaTarget, TaskRepository taskRepository) { + taskRepositories.put(schemaTarget, taskRepository); + } + + @Override + public TaskRepository get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskRepository repository = taskRepositories.get(schemaTarget); + Assert.notNull(repository, "Expected TaskRepository for " + schemaTarget); + return repository; + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.aggregate.task.impl.DefaultTaskRepositoryContainer"); + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml b/spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml new file mode 100644 index 0000000000..fe13492971 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/spring-cloud-dataflow-audit/pom.xml b/spring-cloud-dataflow-audit/pom.xml index 6e47601a1f..8bcb261942 100644 --- a/spring-cloud-dataflow-audit/pom.xml +++ b/spring-cloud-dataflow-audit/pom.xml @@ -1,21 +1,32 @@ - + 4.0.0 spring-cloud-dataflow-parent org.springframework.cloud - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-audit + spring-cloud-dataflow-audit + Spring Cloud Data Flow Audit + jar + + true + 3.4.1 + org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.boot @@ -23,4 +34,45 @@ test + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + 
org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java index 7eff50695e..84eccdc2df 100644 --- a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java +++ b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.audit.service; import java.time.Instant; +import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -26,6 +27,7 @@ import org.slf4j.LoggerFactory; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository; +import org.springframework.cloud.dataflow.core.ArgumentSanitizer; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; import org.springframework.cloud.dataflow.core.AuditRecord; @@ -38,74 +40,107 @@ * * @author Gunnar Hillert * @author Daniel Serleg + * @author Corneil du Plessis */ public class DefaultAuditRecordService implements AuditRecordService { - private static final Logger logger = LoggerFactory.getLogger(DefaultAuditRecordService.class); - - private final AuditRecordRepository auditRecordRepository; - - private final ObjectMapper objectMapper; - - public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository) { - Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null."); - this.auditRecordRepository = auditRecordRepository; - this.objectMapper = new ObjectMapper(); - this.objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); - } - - public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository, ObjectMapper objectMapper) { - Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null."); - Assert.notNull(objectMapper, "objectMapper must not be null."); - this.auditRecordRepository = auditRecordRepository; - this.objectMapper = objectMapper; - } - - @Override - public AuditRecord populateAndSaveAuditRecord(AuditOperationType auditOperationType, - AuditActionType auditActionType, - String correlationId, String data, String platformName) { - Assert.notNull(auditActionType, "auditActionType must not be null."); - Assert.notNull(auditOperationType, "auditOperationType must not be null."); - - final AuditRecord auditRecord = new AuditRecord(); - auditRecord.setAuditAction(auditActionType); - auditRecord.setAuditOperation(auditOperationType); - auditRecord.setCorrelationId(correlationId); - auditRecord.setAuditData(data); - auditRecord.setPlatformName(platformName); - return this.auditRecordRepository.save(auditRecord); - } - - @Override - public AuditRecord populateAndSaveAuditRecordUsingMapData(AuditOperationType auditOperationType, - AuditActionType auditActionType, - String correlationId, Map data, String platformName) { - String dataAsString; - try { - dataAsString = objectMapper.writeValueAsString(data); - } - catch (JsonProcessingException e) { - logger.error("Error serializing audit record data. Data = " + data); - dataAsString = "Error serializing audit record data. 
Data = " + data; - } - return this.populateAndSaveAuditRecord(auditOperationType, auditActionType, correlationId, dataAsString, platformName); - } - - @Override - public Page findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate( - Pageable pageable, - AuditActionType[] actions, - AuditOperationType[] operations, - Instant fromDate, - Instant toDate) { - return this.auditRecordRepository.findByActionTypeAndOperationTypeAndDate(operations, actions, fromDate, toDate, - pageable); - } - - @Override - public Optional findById(Long id) { - return this.auditRecordRepository.findById(id); - } + private static final Logger logger = LoggerFactory.getLogger(DefaultAuditRecordService.class); + + private final AuditRecordRepository auditRecordRepository; + + private final ObjectMapper objectMapper; + + private final ArgumentSanitizer sanitizer; + + public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository) { + + this(auditRecordRepository, null); + } + + public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository, ObjectMapper objectMapper) { + + Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null."); + this.auditRecordRepository = auditRecordRepository; + if (objectMapper == null) { + objectMapper = new ObjectMapper(); + objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + } + this.objectMapper = objectMapper; + this.sanitizer = new ArgumentSanitizer(); + } + + @Override + public AuditRecord populateAndSaveAuditRecord(AuditOperationType auditOperationType, + AuditActionType auditActionType, + String correlationId, String data, String platformName) { + + Assert.notNull(auditActionType, "auditActionType must not be null."); + Assert.notNull(auditOperationType, "auditOperationType must not be null."); + + final AuditRecord auditRecord = new AuditRecord(); + auditRecord.setAuditAction(auditActionType); + auditRecord.setAuditOperation(auditOperationType); + auditRecord.setCorrelationId(correlationId); + auditRecord.setAuditData(data); + auditRecord.setPlatformName(platformName); + return this.auditRecordRepository.save(auditRecord); + } + + @Override + public AuditRecord populateAndSaveAuditRecordUsingMapData( + AuditOperationType auditOperationType, + AuditActionType auditActionType, + String correlationId, Map data, + String platformName + ) { + + String dataAsString; + try { + Map sanitizedData = sanitizeMap(data); + dataAsString = objectMapper.writeValueAsString(sanitizedData); + } catch (JsonProcessingException e) { + logger.error("Error serializing audit record data. Data = " + data); + dataAsString = "Error serializing audit record data. 
Data = " + data; + } + return this.populateAndSaveAuditRecord(auditOperationType, auditActionType, correlationId, dataAsString, platformName); + } + + private Map sanitizeMap(Map data) { + + final Map result = new HashMap<>(); + data.forEach((k, v) -> result.put(k, sanitize(k, v))); + return result; + } + + private Object sanitize(String key, Object value) { + + if (value instanceof String) { + return sanitizer.sanitize(key, (String) value); + } else if (value instanceof Map) { + Map input = (Map) value; + return sanitizeMap(input); + } else { + return value; + } + } + + + @Override + public Page findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate( + Pageable pageable, + AuditActionType[] actions, + AuditOperationType[] operations, + Instant fromDate, + Instant toDate) { + + return this.auditRecordRepository.findByActionTypeAndOperationTypeAndDate(operations, actions, fromDate, toDate, + pageable); + } + + @Override + public Optional findById(Long id) { + + return this.auditRecordRepository.findById(id); + } } diff --git a/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java b/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java index 5b12a0bf03..8029ee0135 100644 --- a/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java +++ b/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java @@ -15,13 +15,16 @@ */ package org.springframework.cloud.dataflow.server.audit.service; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository; @@ -32,8 +35,10 @@ import org.springframework.cloud.dataflow.core.AuditRecord; import org.springframework.data.domain.PageRequest; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNull; @@ -45,218 +50,255 @@ /** * @author Gunnar Hillert + * @author Corneil du Plessis */ public class DefaultAuditRecordServiceTests { - private AuditRecordRepository auditRecordRepository; - - @Before - public void setupMock() { - this.auditRecordRepository = mock(AuditRecordRepository.class); - } - - @Test - public void testInitializationWithNullParameters() { - try { - new DefaultAuditRecordService(null); - } - catch (IllegalArgumentException e) { - assertEquals("auditRecordRepository must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecord() { - final AuditRecordService auditRecordService = new 
DefaultAuditRecordService(this.auditRecordRepository); - auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, AuditActionType.CREATE, "1234", - "my data", "test-platform"); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); - verify(this.auditRecordRepository, times(1)).save(argument.capture()); - verifyNoMoreInteractions(this.auditRecordRepository); - - AuditRecord auditRecord = argument.getValue(); - - assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); - assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); - assertEquals("1234", auditRecord.getCorrelationId()); - assertEquals("my data", auditRecord.getAuditData()); - assertEquals("test-platform", auditRecord.getPlatformName()); - } - - @Test - public void testPopulateAndSaveAuditRecordWithNullAuditActionType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - try { - auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform"); - } - catch (IllegalArgumentException e) { - assertEquals("auditActionType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordWithNullAuditOperationType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - try { - auditRecordService.populateAndSaveAuditRecord(null, AuditActionType.CREATE, "1234", "my audit data", "test-platform"); - } - catch (IllegalArgumentException e) { - assertEquals("auditOperationType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordWithMapData() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo1", "bar1"); - mapAuditData.put("foofoo", "barbar"); - - auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, - "1234", mapAuditData, "test-platform"); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); - verify(this.auditRecordRepository, times(1)).save(argument.capture()); - verifyNoMoreInteractions(this.auditRecordRepository); - - final AuditRecord auditRecord = argument.getValue(); - - assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); - assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); - assertEquals("1234", auditRecord.getCorrelationId()); - assertEquals("{\"foofoo\":\"barbar\",\"foo1\":\"bar1\"}", auditRecord.getAuditData()); - assertEquals("test-platform", auditRecord.getPlatformName()); - } - - @Test - public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditActionType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo", "bar"); - - try { - auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, null, "1234", - mapAuditData, null); - } - catch (IllegalArgumentException e) { - assertEquals("auditActionType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void 
testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditOperationType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo", "bar"); - - try { - auditRecordService.populateAndSaveAuditRecordUsingMapData(null, AuditActionType.CREATE, "1234", - mapAuditData, null); - } - catch (IllegalArgumentException e) { - assertEquals("auditOperationType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordUsingMapDataThrowingJsonProcessingException() - throws JsonProcessingException { - final ObjectMapper objectMapper = mock(ObjectMapper.class); - when(objectMapper.writeValueAsString(any(Object.class))).thenThrow(new JsonProcessingException("Error") { - private static final long serialVersionUID = 1L; - }); - - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, - objectMapper); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo", "bar"); - - auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, - "1234", mapAuditData, "test-platform"); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); - verify(this.auditRecordRepository, times(1)).save(argument.capture()); - verifyNoMoreInteractions(this.auditRecordRepository); - - AuditRecord auditRecord = argument.getValue(); - - assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); - assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); - assertEquals("1234", auditRecord.getCorrelationId()); - assertEquals("Error serializing audit record data. 
Data = {foo=bar}", auditRecord.getAuditData()); - assertEquals("test-platform", auditRecord.getPlatformName()); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - AuditActionType[] auditActionTypes = { AuditActionType.CREATE }; - AuditOperationType[] auditOperationTypes = { AuditOperationType.STREAM }; - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, - auditOperationTypes, null, null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), - eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullAuditActionType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - AuditOperationType[] auditOperationTypes = { AuditOperationType.STREAM }; - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, - auditOperationTypes, null, null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), - isNull(), isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullOperationType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - AuditActionType[] auditActionTypes = { AuditActionType.CREATE }; - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, - null, null, null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), - eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullActionAndOperationType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, null, null, - null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), isNull(), - isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } + private AuditRecordRepository auditRecordRepository; + + @BeforeEach + public void setupMock() { + this.auditRecordRepository = mock(AuditRecordRepository.class); + } + + @Test + public void testInitializationWithNullParameters() { + try { + new DefaultAuditRecordService(null); + } catch (IllegalArgumentException e) { + assertEquals("auditRecordRepository must not be null.", e.getMessage()); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + public void testPopulateAndSaveAuditRecord() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, 
AuditActionType.CREATE, "1234", + "my data", "test-platform"); + + final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + AuditRecord auditRecord = argument.getValue(); + + assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); + assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); + assertEquals("1234", auditRecord.getCorrelationId()); + assertEquals("my data", auditRecord.getAuditData()); + assertEquals("test-platform", auditRecord.getPlatformName()); + } + + @Test + public void testPopulateAndSaveAuditRecordWithNullAuditActionType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + try { + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform"); + } catch (IllegalArgumentException e) { + assertEquals("auditActionType must not be null.", e.getMessage()); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + public void testPopulateAndSaveAuditRecordWithNullAuditOperationType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + try { + auditRecordService.populateAndSaveAuditRecord(null, AuditActionType.CREATE, "1234", "my audit data", "test-platform"); + } catch (IllegalArgumentException e) { + assertEquals("auditOperationType must not be null.", e.getMessage()); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + public void testPopulateAndSaveAuditRecordWithMapData() throws JsonProcessingException { + final ObjectMapper mapper = new ObjectMapper(); + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, mapper); + + final Map mapAuditData = new HashMap<>(2); + mapAuditData.put("foo1", "bar1"); + mapAuditData.put("foofoo", "barbar"); + + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, + "1234", mapAuditData, "test-platform"); + + final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + final AuditRecord auditRecord = argument.getValue(); + + assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); + assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); + assertEquals("1234", auditRecord.getCorrelationId()); + assertEquals(mapper.convertValue(mapAuditData, JsonNode.class), mapper.readTree(auditRecord.getAuditData())); + assertEquals("test-platform", auditRecord.getPlatformName()); + } + + @Test + public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditActionType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + final Map mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + + try { + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, null, "1234", + mapAuditData, null); + } catch (IllegalArgumentException e) { + assertEquals("auditActionType must not be null.", e.getMessage()); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditOperationType() { + final 
AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + final Map mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + + try { + auditRecordService.populateAndSaveAuditRecordUsingMapData(null, AuditActionType.CREATE, "1234", + mapAuditData, null); + } catch (IllegalArgumentException e) { + assertEquals("auditOperationType must not be null.", e.getMessage()); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + public void testPopulateAndSaveAuditRecordUsingMapDataThrowingJsonProcessingException() + throws JsonProcessingException { + final ObjectMapper objectMapper = mock(ObjectMapper.class); + when(objectMapper.writeValueAsString(any(Object.class))).thenThrow(new JsonProcessingException("Error") { + private static final long serialVersionUID = 1L; + }); + + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, + objectMapper); + + final Map mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, + "1234", mapAuditData, "test-platform"); + + final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + AuditRecord auditRecord = argument.getValue(); + + assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); + assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); + assertEquals("1234", auditRecord.getCorrelationId()); + assertEquals("test-platform", auditRecord.getPlatformName()); + assertEquals("Error serializing audit record data. 
Data = {foo=bar}", auditRecord.getAuditData()); + + + } + + @Test + public void testPopulateAndSaveAuditRecordUsingSensitiveMapData() { + final ObjectMapper objectMapper = new ObjectMapper(); + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, objectMapper); + + final Map mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + mapAuditData.put("spring.cloud.config.password", "12345"); + final Map child = new HashMap<>(); + child.put("password", "54321"); + child.put("bar1", "foo2"); + mapAuditData.put("spring.child", child); + mapAuditData.put("spring.empty", Collections.emptyMap()); + + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, + "1234", mapAuditData, "test-platform"); + + final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + AuditRecord auditRecord = argument.getValue(); + + assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); + assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); + assertEquals("1234", auditRecord.getCorrelationId()); + + assertEquals("test-platform", auditRecord.getPlatformName()); + System.out.println("auditData=" + auditRecord.getAuditData()); + assertTrue(auditRecord.getAuditData().contains("\"******\"")); + assertTrue(auditRecord.getAuditData().contains("\"bar\"")); + assertTrue(auditRecord.getAuditData().contains("\"foo\"")); + assertTrue(auditRecord.getAuditData().contains("\"spring.cloud.config.password\"")); + assertTrue(auditRecord.getAuditData().contains("\"password\"")); + assertFalse(auditRecord.getAuditData().contains("54321")); + assertFalse(auditRecord.getAuditData().contains("12345")); + } + + @Test + public void testFindAuditRecordByAuditOperationTypeAndAuditActionType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + AuditActionType[] auditActionTypes = {AuditActionType.CREATE}; + AuditOperationType[] auditOperationTypes = {AuditOperationType.STREAM}; + PageRequest pageRequest = PageRequest.of(0, 1); + auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, + auditOperationTypes, null, null); + + verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), + eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); + verifyNoMoreInteractions(this.auditRecordRepository); + } + + @Test + public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullAuditActionType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + AuditOperationType[] auditOperationTypes = {AuditOperationType.STREAM}; + PageRequest pageRequest = PageRequest.of(0, 1); + auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, + auditOperationTypes, null, null); + + verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), + isNull(), isNull(), isNull(), eq(pageRequest)); + verifyNoMoreInteractions(this.auditRecordRepository); + } + + @Test + public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullOperationType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + 
AuditActionType[] auditActionTypes = {AuditActionType.CREATE}; + PageRequest pageRequest = PageRequest.of(0, 1); + auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, + null, null, null); + + verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), + eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); + verifyNoMoreInteractions(this.auditRecordRepository); + } + + @Test + public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullActionAndOperationType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + PageRequest pageRequest = PageRequest.of(0, 1); + auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, null, null, + null); + + verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), isNull(), + isNull(), isNull(), eq(pageRequest)); + verifyNoMoreInteractions(this.auditRecordRepository); + } } diff --git a/spring-cloud-dataflow-autoconfigure/pom.xml b/spring-cloud-dataflow-autoconfigure/pom.xml index 87fb52b0ac..f67bca5186 100644 --- a/spring-cloud-dataflow-autoconfigure/pom.xml +++ b/spring-cloud-dataflow-autoconfigure/pom.xml @@ -1,15 +1,21 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-autoconfigure jar spring-cloud-dataflow-autoconfigure Data Flow Autoconfig + + true + 3.4.1 + org.springframework.boot @@ -18,10 +24,12 @@ org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-platform-kubernetes + ${project.version} io.fabric8 @@ -30,6 +38,7 @@ org.springframework.cloud spring-cloud-dataflow-platform-cloudfoundry + ${project.version} org.springframework.cloud @@ -52,6 +61,11 @@ spring-boot-starter-test test + + com.h2database + h2 + test + @@ -67,7 +81,37 @@ org.apache.maven.plugins maven-resources-plugin + 3.3.1 + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java index ac36cc18c5..b1154712a8 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java +++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java @@ -18,8 +18,6 @@ import io.pivotal.reactor.scheduler.ReactorSchedulerClient; import org.cloudfoundry.operations.CloudFoundryOperations; -import org.junit.runner.RunWith; -import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; @@ -30,6 +28,9 @@ import org.springframework.boot.cloud.CloudPlatform; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; +import 
org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; @@ -39,12 +40,13 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import static org.mockito.Mockito.mock; /** * @author Christian Tzolov + * @author Corneil du Plessis */ -@RunWith(SpringJUnit4ClassRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = AbstractSchedulerPerPlatformTest.AutoConfigurationApplication.class) @DirtiesContext @@ -54,10 +56,24 @@ public abstract class AbstractSchedulerPerPlatformTest { protected ApplicationContext context; @Configuration - @EnableAutoConfiguration(exclude = { LocalDataFlowServerAutoConfiguration.class, + @EnableAutoConfiguration(exclude = {LocalDataFlowServerAutoConfiguration.class, CloudFoundryDeployerAutoConfiguration.class, SecurityAutoConfiguration.class, - SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class }) + SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class}) public static class AutoConfigurationApplication { + @Bean + public AppRegistryService appRegistryService() { + return mock(AppRegistryService.class); + } + + @Bean + public TaskDefinitionReader taskDefinitionReader() { + return mock(TaskDefinitionReader.class); + } + + @Bean + public TaskDeploymentReader taskDeploymentReader() { + return mock(TaskDeploymentReader.class); + } @Configuration @ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY) @@ -68,25 +84,25 @@ public static class CloudFoundryMockConfig { @Bean @Primary public ReactorSchedulerClient reactorSchedulerClient() { - return Mockito.mock(ReactorSchedulerClient.class); + return mock(ReactorSchedulerClient.class); } @Bean @Primary public CloudFoundryOperations cloudFoundryOperations() { - return Mockito.mock(CloudFoundryOperations.class); + return mock(CloudFoundryOperations.class); } @Bean @Primary public CloudFoundryConnectionProperties cloudFoundryConnectionProperties() { - return Mockito.mock(CloudFoundryConnectionProperties.class); + return mock(CloudFoundryConnectionProperties.class); } @Bean @Primary public CloudFoundryTaskLauncher CloudFoundryTaskLauncher() { - return Mockito.mock(CloudFoundryTaskLauncher.class); + return mock(CloudFoundryTaskLauncher.class); } } } diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java index 31eb81ea97..e55838dd2e 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java +++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java @@ -16,11 +16,11 
@@ package org.springframework.cloud.dataflow.autoconfigure.local; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent; import org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryCloudProfileProvider; @@ -35,8 +35,9 @@ * {@link ProfileApplicationListener} test cases * * @author Chris Schaefer + * @author Corneil du Plessis */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class ProfileApplicationListenerTest { private MockEnvironment environment; @@ -46,7 +47,7 @@ public class ProfileApplicationListenerTest { private ProfileApplicationListener profileApplicationListener; - @Before + @BeforeEach public void before() { environment = new MockEnvironment(); when(event.getEnvironment()).thenReturn(environment); diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java index 580c69fe8f..1860da0043 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java +++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
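(Aside: the @RunWith-to-@ExtendWith conversion in ProfileApplicationListenerTest above is the same JUnit 4 to JUnit 5 migration applied throughout this changeset. A minimal sketch of the target idiom follows; the Event type and its stubbing are illustrative, not from this repository.)

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class) // was: @RunWith(MockitoJUnitRunner.class)
class ListenerMigrationSketchTest {

    interface Event { String name(); } // hypothetical collaborator

    @Mock
    private Event event;

    @BeforeEach // was: @Before from org.junit
    void before() {
        when(event.name()).thenReturn("prepared");
    }

    @Test
    void usesTheStubbedMock() {
        assertEquals("prepared", event.name());
    }
}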
@@ -16,9 +16,8 @@ package org.springframework.cloud.dataflow.autoconfigure.local; -import org.junit.Test; -import org.junit.experimental.runners.Enclosed; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.cloud.CloudPlatform; @@ -26,34 +25,41 @@ import org.springframework.cloud.deployer.spi.scheduler.Scheduler; import org.springframework.test.context.TestPropertySource; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Christian Tzolov + * @author Corneil du Plessis */ -@RunWith(Enclosed.class) + public class SchedulerPerPlatformTest { - @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=false" }) - public static class AllSchedulerDisabledTests extends AbstractSchedulerPerPlatformTest { + @Nested + @TestPropertySource(properties = {"spring.cloud.dataflow.features.schedules-enabled=false"}) + public class AllSchedulerDisabledTests extends AbstractSchedulerPerPlatformTest { - @Test(expected = NoSuchBeanDefinitionException.class) + @Test public void testLocalSchedulerEnabled() { - assertFalse(context.getEnvironment().containsProperty("kubernetes_service_host")); - assertFalse(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); - context.getBean(Scheduler.class); + assertThrows(NoSuchBeanDefinitionException.class, () -> { + assertFalse(context.getEnvironment().containsProperty("kubernetes_service_host")); + assertFalse(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + context.getBean(Scheduler.class); + }); } } - @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true" }) - public static class LocalSchedulerTests extends AbstractSchedulerPerPlatformTest { + @Nested + @TestPropertySource(properties = {"spring.cloud.dataflow.features.schedules-enabled=true"}) + public class LocalSchedulerTests extends AbstractSchedulerPerPlatformTest { @Test public void testLocalSchedulerEnabled() { - assertFalse("K8s should be disabled", context.getEnvironment().containsProperty("kubernetes_service_host")); - assertFalse("CF should be disabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + assertFalse(context.getEnvironment().containsProperty("kubernetes_service_host"), "K8s should be disabled"); + assertFalse(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()), "CF should be disabled"); Scheduler scheduler = context.getBean(Scheduler.class); @@ -62,14 +68,15 @@ public void testLocalSchedulerEnabled() { } } - @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true", - "kubernetes_service_host=dummy" }) - public static class KubernetesSchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { + @Nested + @TestPropertySource(properties = {"spring.cloud.dataflow.features.schedules-enabled=true", + "kubernetes_service_host=dummy", "spring.cloud.kubernetes.client.namespace=default"}) + public class KubernetesSchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { @Test public void testKubernetesSchedulerEnabled() { - assertTrue("K8s 
should be enabled", context.getEnvironment().containsProperty("kubernetes_service_host")); - assertFalse("CF should be disabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + assertTrue(context.getEnvironment().containsProperty("kubernetes_service_host"), "K8s should be enabled"); + assertFalse(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()), "CF should be disabled"); KubernetesSchedulerProperties props = context.getBean(KubernetesSchedulerProperties.class); @@ -78,14 +85,16 @@ public void testKubernetesSchedulerEnabled() { } - @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true", - "VCAP_APPLICATION=\"{\"instance_id\":\"123\"}\"" }) - public static class CloudFoundrySchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { + @Nested + @TestPropertySource(properties = {"spring.cloud.dataflow.features.schedules-enabled=true", + "VCAP_APPLICATION=\"{\"instance_id\":\"123\"}\""}) + public class CloudFoundrySchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { @Test - public void testCloudFoundryScheudlerEnabled() { - assertFalse("K8s should be disabled", context.getEnvironment().containsProperty("kubernetes_service_host")); - assertTrue("CF should be enabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + public void testCloudFoundrySchedulerEnabled() { + assertFalse(context.getEnvironment() + .containsProperty("kubernetes_service_host"), "K8s should be disabled"); + assertTrue(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()), "CF should be enabled"); } } diff --git a/spring-cloud-dataflow-build/README.md b/spring-cloud-dataflow-build/README.md new file mode 100644 index 0000000000..7c459aa9a3 --- /dev/null +++ b/spring-cloud-dataflow-build/README.md @@ -0,0 +1 @@ +# spring-cloud-dataflow-build diff --git a/spring-cloud-dataflow-build/pom.xml b/spring-cloud-dataflow-build/pom.xml new file mode 100644 index 0000000000..b1830ade94 --- /dev/null +++ b/spring-cloud-dataflow-build/pom.xml @@ -0,0 +1,741 @@ + + + 4.0.0 + org.springframework.cloud + spring-cloud-dataflow-build + 2.11.6-SNAPSHOT + pom + Spring Cloud Dataflow Build + Spring Cloud Dataflow Build, managing plugins and dependencies + + spring-cloud-dataflow-build-dependencies + spring-cloud-dataflow-dependencies-parent + spring-cloud-dataflow-build-tools + + https://spring.io/projects/spring-cloud-dataflow + + 1.8 + @ + UTF-8 + UTF-8 + ${basedir} + ${project.artifactId} + + 2.7.18 + + 5.3.39 + 2021.0.9 + 5.7.12 + 2.1.15 + 2.11.6-SNAPSHOT + ${project.build.directory}/build-docs + ${project.build.directory}/refdocs/ + 0.1.3.RELEASE + 2.3.7 + 2.2.9 + ${project.version} + deploy + ${project.version} + 2.5.7 + 9.2.7.0 + + + jacoco + reuseReports + ${project.basedir}/../target/jacoco.exec + java + + + 3.11.0 + 2.10 + + ${maven-checkstyle-plugin.version} + 8.29 + 0.0.9 + 3.2.3 + 3.4.1 + 3.0.1 + 3.3.0 + 1.8 + 3.2.3 + 1.6.0 + 3.3.1 + 4.9.9 + 3.1.1 + 3.0.0-M2 + 1.6 + 3.1.1 + 2.2.4 + 3.0.0 + 2.2.1 + 1.20 + 0.0.7 + 3.1.0 + false + true + true + true + main + + https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow-build/${spring-cloud-build-checkstyle.branch}/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml + + + https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow-build/${spring-cloud-build-checkstyle.branch}/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml + + + ${project.basedir}/../src/checkstyle/checkstyle-suppressions.xml + + 
0.0.2.RELEASE + true + 3.2.10 + 1.8.1 + ${project.basedir}/src/main/asciidoc/_configprops.adoc + .* + generate-resources + generate-resources + + slow,docker + + + + + org.springframework.cloud + spring-cloud-dataflow-build-dependencies + 2.11.6-SNAPSHOT + pom + import + + + + + + + + Pivotal Software, Inc. + https://www.spring.io + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0 + + + + https://github.com/spring-cloud/spring-cloud-dataflow-build + scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git + + + scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git + + HEAD + + + + scdf-team + Data Flow Team + https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors + + + + + + + ${basedir}/src/main/resources + true + + **/application*.yml + **/application*.properties + + + + ${basedir}/src/main/resources + + **/application*.yml + **/application*.properties + + + + + + ${basedir}/src/test/resources + true + + **/application*.yml + **/application*.properties + + + + ${basedir}/src/test/resources + + **/application*.yml + **/application*.properties + + + + + + + + org.apache.maven.plugins + maven-eclipse-plugin + ${maven-eclipse-plugin.version} + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + ${groups} + ${excludedGroups} + + **/Abstract*.* + + + + + + integration-test + verify + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + + ${start-class} + true + + + + + + org.apache.maven.plugins + maven-antrun-plugin + ${maven-antrun-plugin.version} + + + org.apache.maven.plugins + maven-surefire-plugin + 3.2.5 + + true + 1 + 1 + + **/*Tests.java + **/*Test.java + + + **/Abstract*.java + + ${groups} + ${excludedGroups} + + + + org.codehaus.mojo + exec-maven-plugin + ${exec-maven-plugin.version} + + ${start-class} + + + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + + + ${resource.delimiter} + + false + + + + io.spring.javaformat + spring-javaformat-maven-plugin + ${spring-javaformat.version} + + + validate + + ${disable.checks} + + + apply + validate + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + ${maven-checkstyle-plugin.version} + + + com.puppycrawl.tools + checkstyle + ${puppycrawl-tools-checkstyle.version} + + + io.spring.javaformat + spring-javaformat-checkstyle + ${spring-javaformat-checkstyle.version} + + + org.springframework.cloud + spring-cloud-dataflow-build-tools + ${project.version} + + + io.spring.nohttp + nohttp-checkstyle + ${nohttp-checkstyle.version} + + + + + checkstyle-validation + validate + true + + ${disable.checks} + checkstyle.xml + checkstyle-header.txt + + checkstyle.build.directory=${project.build.directory} + checkstyle.suppressions.file=${checkstyle.suppressions.file} + checkstyle.additional.suppressions.file=${checkstyle.additional.suppressions.file} + + true + + + ${maven-checkstyle-plugin.includeTestSourceDirectory} + + ${maven-checkstyle-plugin.failsOnError} + + + ${maven-checkstyle-plugin.failOnViolation} + + + + check + + + + no-http-checkstyle-validation + validate + true + + ${disable.nohttp.checks} + ${checkstyle.nohttp.file} + **/* + **/.idea/**/*,**/.git/**/*,**/target/**/*,**/*.log + ./ + + + check + + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + ${git-commit-id-plugin.version} + + + + revision + + + + + true + + 
${project.build.outputDirectory}/git.properties + + full + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + + + repackage + + + + + ${start-class} + + + + org.apache.maven.plugins + maven-enforcer-plugin + ${maven-enforcer-plugin.version} + + + enforce-versions + + enforce + + + + + false + + + + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + + org.apache.maven.plugins + + + maven-checkstyle-plugin + + + [2.17,) + + + check + + + + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-surefire-report-plugin + 3.0.0 + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + false + true + + + + org.apache.maven.plugins + maven-source-plugin + ${maven-source-plugin.version} + + + attach-sources + + jar + + package + + + + + + + + + org.apache.maven.plugins + maven-surefire-report-plugin + 3.0.0 + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + non-aggregate + + + + javadoc + + + + aggregate + + + + aggregate + + + + + + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + true + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + milestone + + + repo.spring.io + Spring Milestone Repository + https://repo.spring.io/libs-milestone-local + + + + + java8 + + [1.8,) + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + none + + + + + + + sonar + + false + + + + + org.jacoco + jacoco-maven-plugin + 0.8.8 + + ${sonar.jacoco.reportPath} + true + + + + agent + + prepare-agent + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + true + 1 + 1 + + + listener + org.sonar.java.jacoco.JUnitListener + + + + + + + + + + + + + + + + + license + + true + + + + + org.codehaus.mojo + license-maven-plugin + ${license-maven-plugin.version} + + + aggregate-licenses + + license:aggregate-add-third-party + + + + + + + + + fast + + true + + + + failsafe + + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + + **/Abstract*.* + + + + + + + + checkstyle + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml new file mode 100644 index 0000000000..38665dee56 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml @@ -0,0 +1,270 @@ + + + 4.0.0 + org.springframework.cloud + spring-cloud-dataflow-build-dependencies + 2.11.6-SNAPSHOT + Spring Cloud Dataflow Build Dependencies + pom + Spring Cloud Dataflow Build Dependencies: an internal BOM for use with Spring + Cloud Dataflow projects. 
Use as a BOM or by inheriting from the spring-cloud-dataflow-build. + + + org.springframework.boot + spring-boot-dependencies + + 2.7.18 + + + + UTF-8 + + 2.7.18 + 5.3.39 + 2021.0.9 + 5.7.12 + 2.1.13 + 1.10.0 + 1.19.8 + + 9.0.93 + 1.78.1 + 2.9.13 + 4.1.113.Final + 2020.0.47 + 1.1.4 + 1.33 + 2.17.2 + 2.4.11 + 9.37.3 + 1.1.10.6 + 1.26.1 + 2.15.1 + 42.7.2 + 1.5.2 + 2.3.0 + 3.5.4 + 5.12.4 + 4.13.2 + 4.11.0 + 1.2.13 + 2.9.0 + 32.1.3-jre + + + + + com.google.guava + guava + ${guava.version} + + + junit + junit + 4.13.2 + + + net.minidev + json-smart + ${json-smart.version} + + + com.nimbusds + nimbus-jose-jwt + ${nimbus-jose-jwt.version} + + + org.yaml + snakeyaml + ${snakeyaml.version} + + + org.xerial.snappy + snappy-java + ${snappy-java.version} + + + org.apache.commons + commons-compress + ${commons-compress.version} + + + commons-io + commons-io + ${commons-io.version} + + + org.testcontainers + testcontainers-bom + ${testcontainers.version} + + + org.apache.commons + commons-compress + + + pom + import + + + io.fabric8 + kubernetes-client-bom + ${kubernetes-fabric8-client.version} + pom + import + + + org.springframework + spring-framework-bom + ${spring-framework.version} + pom + import + + + org.springframework.security + spring-security-bom + ${spring-security.version} + pom + import + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + org.springframework.shell + spring-shell-dependencies + ${spring-shell.version} + pom + import + + + org.apache.commons + commons-text + ${commons-text.version} + + + org.postgresql + postgresql + ${postgresql.version} + + + io.micrometer.prometheus + prometheus-rsocket-spring + ${prometheus-rsocket.version} + + + io.micrometer.prometheus + prometheus-rsocket-client + ${prometheus-rsocket.version} + + + io.pivotal.cfenv + java-cfenv + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-boot + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-boot-pivotal-scs + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-boot-pivotal-sso + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-jdbc + ${java-cfenv.version} + + + io.pivotal.spring.cloud + spring-cloud-services-starter-config-client + ${spring-cloud-services-starter-config-client.version} + + + + org.bouncycastle + bcprov-jdk18on + ${bouncycastle.version} + + + org.bouncycastle + bcpkix-jdk18on + ${bouncycastle.version} + + + org.bouncycastle + bcutil-jdk18on + ${bouncycastle.version} + + + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + true + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml new file mode 100644 index 0000000000..9427c63b72 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml @@ -0,0 +1,40 @@ + + + 4.0.0 + spring-cloud-dataflow-build-tools + spring-cloud-dataflow-build-tools + jar + Spring Cloud Dataflow Build Tools + + org.springframework.cloud + spring-cloud-dataflow-build + 
2.11.6-SNAPSHOT + + + + com.puppycrawl.tools + checkstyle + ${puppycrawl-tools-checkstyle.version} + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-surefire-plugin + + false + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml new file mode 100644 index 0000000000..6cb6ad2669 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml new file mode 100644 index 0000000000..f5f6705862 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml new file mode 100644 index 0000000000..4e21a0bdf5 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt new file mode 100644 index 0000000000..ff707f0f9e --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt @@ -0,0 +1,15 @@ +^\Q/*\E$ +^\Q * Copyright \E(20\d\d\-)?20\d\d\Q the original author or authors.\E$ +^\Q *\E$ +^\Q * Licensed under the Apache License, Version 2.0 (the "License");\E$ +^\Q * you may not use this file except in compliance with the License.\E$ +^\Q * You may obtain a copy of the License at\E$ +^\Q *\E$ +^\Q * https://www.apache.org/licenses/LICENSE-2.0\E$ +^\Q *\E$ +^\Q * Unless required by applicable law or agreed to in writing, software\E$ +^\Q * distributed under the License is distributed on an "AS IS" BASIS,\E$ +^\Q * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\E$ +^\Q * See the License for the specific language governing permissions and\E$ +^\Q * limitations under the License.\E$ +^\Q */\E$ diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml new file mode 100644 index 0000000000..ff46fb9e86 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml @@ -0,0 +1,203 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml 
b/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml new file mode 100644 index 0000000000..cb110367c4 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml @@ -0,0 +1,124 @@ + + + 4.0.0 + org.springframework.cloud + 2.11.6-SNAPSHOT + spring-cloud-dataflow-dependencies-parent + pom + Spring Cloud Dataflow Dependencies Parent + Spring Cloud Data Flow Build Dependencies + https://projects.spring.io/spring-cloud/ + + Pivotal Software, Inc. + https://www.spring.io + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0 + + + + https://github.com/spring-cloud/spring-cloud-dataflow-build + scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git + + + scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git + + HEAD + + + + scdf-team + Data Flow Team + https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors + + + + UTF-8 + + 2.11.6-SNAPSHOT + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + true + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + 1.4.1 + + + enforce-versions + + enforce + + + + + false + + + + + + + + + diff --git a/spring-cloud-dataflow-classic-docs/pom.xml b/spring-cloud-dataflow-classic-docs/pom.xml index 6fc37f6ab8..634404b43b 100644 --- a/spring-cloud-dataflow-classic-docs/pom.xml +++ b/spring-cloud-dataflow-classic-docs/pom.xml @@ -4,7 +4,8 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-classic-docs Spring Cloud Data Flow Docs for Classic mode @@ -13,38 +14,42 @@ org.springframework.cloud spring-cloud-dataflow-configuration-metadata + ${project.version} org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-registry + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-client - - - org.springframework.cloud - spring-cloud-dataflow-shell-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-completion + ${project.version} org.springframework.cloud spring-cloud-starter-dataflow-server + ${project.version} test-jar test @@ -73,7 +78,33 @@ spring-boot-starter-test test - + + com.h2database + h2 + test + + + org.awaitility + awaitility + test + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + false + + + + org.apache.maven.plugins + maven-jar-plugin + 3.3.0 + + + full @@ -83,6 +114,9 @@ org.apache.maven.plugins maven-surefire-plugin + 1 + 1 + true false **/*Documentation.java diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java 
index 4fb34a5ec9..a6bc537c99 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java @@ -16,7 +16,7 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.http.MediaType; import org.springframework.restdocs.payload.JsonFieldType; @@ -24,17 +24,24 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Chris Bono + * @author Corneil du Plessis */ +@SuppressWarnings("NewClassNamingConvention") public class AboutDocumentation extends BaseDocumentation { @Test public void getMetaInformation() throws Exception { - this.mockMvc.perform(get("/about").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk()) + this.mockMvc.perform( + get("/about") + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) .andDo(this.documentationHandler.document(responseFields( fieldWithPath("_links.self.href").description("Link to the runtime environment resource"), @@ -156,8 +163,36 @@ public void getMetaInformation() throws Exception { fieldWithPath("monitoringDashboardInfo.source").type(JsonFieldType.STRING).description( "Unique DataFlow identifier within the monitoring system."), fieldWithPath("monitoringDashboardInfo.refreshInterval").type(JsonFieldType.NUMBER).description( - "Provides the time interval (in seconds) for updating the monitoring dashboards.") + "Provides the time interval (in seconds) for updating the monitoring dashboards."), + fieldWithPath("gitAndBuildInfo").type(JsonFieldType.OBJECT).description( + "Provides the git and build info for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git").type(JsonFieldType.OBJECT).description( + "Provides the git details for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.branch").type(JsonFieldType.STRING).description( + "Provides the git branch for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit").type(JsonFieldType.OBJECT).description( + "Provides the git commit info for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.id").type(JsonFieldType.OBJECT).description( + "Provides the git commit id for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.id.abbrev").type(JsonFieldType.STRING).description( + "Provides the short git commit id for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.id.full").type(JsonFieldType.STRING).description( + "Provides the full git commit id for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.time").type(JsonFieldType.STRING).description( + "Provides the git commit time for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build").type(JsonFieldType.OBJECT).description( + "Provides the build details for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.artifact").type(JsonFieldType.STRING).description( + 
"Provides the build artifact for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.name").type(JsonFieldType.STRING).description( + "Provides the build name for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.time").type(JsonFieldType.STRING).description( + "Provides the build time for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.version").type(JsonFieldType.STRING).description( + "Provides the build version for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.group").type(JsonFieldType.STRING).description( + "Provides the build group for the Dataflow server") ))); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java index 2ffb62a063..5d07a4c7f8 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java @@ -18,7 +18,7 @@ import javax.servlet.RequestDispatcher; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.rest.Version; import org.springframework.restdocs.payload.JsonFieldType; @@ -40,7 +40,9 @@ * @author Gunnar Hillert * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ +@SuppressWarnings("NewClassNamingConvention") public class ApiDocumentation extends BaseDocumentation { @Test @@ -57,7 +59,6 @@ public void errors() throws Exception { .requestAttr(RequestDispatcher.ERROR_REQUEST_URI, "/apps").requestAttr( RequestDispatcher.ERROR_MESSAGE, "The app 'http://localhost:8080/apps/123' does " + "not exist")) - .andDo(print()) .andExpect(status().isBadRequest()).andExpect(jsonPath("error", is("Bad Request"))) .andExpect(jsonPath("timestamp", is(notNullValue()))).andExpect(jsonPath("status", is(400))) .andExpect(jsonPath("path", is(notNullValue()))) @@ -73,7 +74,9 @@ public void errors() throws Exception { @Test public void index() throws Exception { - this.mockMvc.perform(get("/")).andDo(print()).andExpect(status().isOk()).andDo(this.documentationHandler.document(links( + this.mockMvc.perform(get("/")) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document(links( linkWithRel("about").description( "Access meta information, including enabled " + "features, security info, version information"), @@ -103,11 +106,15 @@ public void index() throws Exception { linkWithRel("runtime/apps/{appId}").description("Exposes the runtime status for a specific app"), linkWithRel("runtime/apps/{appId}/instances").description("Provides the status for app instances"), linkWithRel("runtime/apps/{appId}/instances/{instanceId}").description("Provides the status for specific app instance"), + linkWithRel("runtime/apps/{appId}/instances/{instanceId}/actuator").description("EXPERIMENTAL: Allows invoking Actuator endpoint on specific app instance"), + linkWithRel("runtime/apps/{appId}/instances/{instanceId}/post").description("EXPERIMENTAL: Allows POST on http sink"), linkWithRel("tasks/definitions").description("Provides the task definition resource"), linkWithRel("tasks/definitions/definition").description("Provides details for a specific task definition"), 
linkWithRel("tasks/validation").description("Provides the validation for a task definition"), - linkWithRel("tasks/executions").description("Returns Task executions and allows launching of tasks"), + linkWithRel("tasks/executions").description("Returns Task executions"), + linkWithRel("tasks/executions/launch").description("Provides for launching a Task execution"), + linkWithRel("tasks/executions/external").description("Returns Task execution by external id"), linkWithRel("tasks/executions/current").description("Provides the current count of running tasks"), linkWithRel("tasks/info/executions").description("Provides the task executions info"), linkWithRel("tasks/schedules").description("Provides schedule information of tasks"), @@ -116,6 +123,11 @@ public void index() throws Exception { linkWithRel("tasks/executions/execution").description("Provides details for a specific task execution"), linkWithRel("tasks/platforms").description("Provides platform accounts for launching tasks. The results can be filtered to show the platforms that support scheduling by adding a request parameter of 'schedulesEnabled=true"), linkWithRel("tasks/logs").description("Retrieve the task application log"), + linkWithRel("tasks/thinexecutions").description("Returns thin Task executions"), + linkWithRel("tasks/thinexecutions/name").description("Returns all thin Task executions for a given Task name"), + + linkWithRel("schema/versions").description("List of Spring Boot related schemas"), + linkWithRel("schema/targets").description("List of schema targets"), linkWithRel("streams/definitions").description("Exposes the Streams resource"), linkWithRel("streams/definitions/definition").description("Handle a specific Stream definition"), @@ -142,6 +154,10 @@ public void index() throws Exception { fieldWithPath("['" + Version.REVISION_KEY + "']").description("Incremented each time a change is implemented in this REST API"), fieldWithPath("_links.audit-records.href").description("Link to the audit records"), fieldWithPath("_links.dashboard.href").description("Link to the dashboard"), + + fieldWithPath("_links.schema/versions.href").description("Link to the schema/versions"), + fieldWithPath("_links.schema/targets.href").description("Link to the schema/targets"), + fieldWithPath("_links.streams/definitions.href").description("Link to the streams/definitions"), fieldWithPath("_links.streams/definitions/definition.href").description("Link to the streams/definitions/definition"), fieldWithPath("_links.streams/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link streams/definitions/definition is templated"), @@ -153,6 +169,11 @@ public void index() throws Exception { fieldWithPath("_links.runtime/apps/{appId}/instances.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances is templated"), fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}.href").description("Link to the runtime/apps/{appId}/instances/{instanceId}"), fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId} is templated"), + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/post.href").description("Link to the runtime/apps/{appId}/instances/{instanceId}/post"), + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/post.templated").type(JsonFieldType.BOOLEAN).optional().description("Link 
runtime/apps/{appId}/instances/{instanceId}/post is templated"), + + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/actuator[].href").description("Link to the runtime/apps/{appId}/instances/{instanceId}/actuator"), + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/actuator[].templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId}/actuator is templated"), fieldWithPath("_links.runtime/streams.href").description("Link to the runtime/streams"), fieldWithPath("_links.runtime/streams.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/streams is templated"), @@ -196,17 +217,27 @@ public void index() throws Exception { fieldWithPath("_links.tasks/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/definitions/definition is templated"), fieldWithPath("_links.tasks/executions.href").description("Link to the tasks/executions"), + fieldWithPath("_links.tasks/executions/launch.href").description("Link to tasks/executions/launch"), + fieldWithPath("_links.tasks/executions/launch.templated").type(JsonFieldType.BOOLEAN).optional().description("Indicates that Link tasks/executions/launch is templated"), fieldWithPath("_links.tasks/executions/name.href").description("Link to the tasks/executions/name"), fieldWithPath("_links.tasks/executions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/name is templated"), fieldWithPath("_links.tasks/executions/current.href").description("Link to the tasks/executions/current"), fieldWithPath("_links.tasks/executions/execution.href").description("Link to the tasks/executions/execution"), fieldWithPath("_links.tasks/executions/execution.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/execution is templated"), + fieldWithPath("_links.tasks/executions/external.href").description("Link to the tasks/executions/external"), + fieldWithPath("_links.tasks/executions/external.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/external is templated"), + fieldWithPath("_links.tasks/info/executions.href").description("Link to the tasks/info/executions"), fieldWithPath("_links.tasks/info/executions.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/info is templated"), fieldWithPath("_links.tasks/logs.href").description("Link to the tasks/logs"), fieldWithPath("_links.tasks/logs.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/logs is templated"), + fieldWithPath("_links.tasks/thinexecutions.href").description("Link to the tasks/thinexecutions"), + + fieldWithPath("_links.tasks/thinexecutions/name.href").description("Link to the tasks/thinexecutions/name"), + fieldWithPath("_links.tasks/thinexecutions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link to the tasks/thinexecutions/name is templated"), + fieldWithPath("_links.tasks/schedules.href").description("Link to the tasks/executions/schedules"), fieldWithPath("_links.tasks/schedules/instances.href").description("Link to the tasks/schedules/instances"), fieldWithPath("_links.tasks/schedules/instances.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/schedules/instances is templated"), diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java 
b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java index 43b5054352..0690ad913e 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java @@ -18,7 +18,7 @@ import java.util.Arrays; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.http.MediaType; @@ -42,198 +42,208 @@ * @author Gunnar Hillert * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ +@SuppressWarnings("NewClassNamingConvention") public class AppRegistryDocumentation extends BaseDocumentation { - @Test - public void appDefault() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - - this.mockMvc.perform(RestDocumentationRequestBuilders - .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.2.0.RELEASE").accept(MediaType.APPLICATION_JSON)) - .andExpect(status().isAccepted()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application"), - parameterWithName("version").description("The version of the application") - ) - ) - ); - unregisterApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - unregisterApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - } - - @Test - public void registeringAnApplicationVersion() throws Exception { - this.mockMvc.perform( - post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.1.0.RELEASE") - .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type") - .description("The type of application to register. One of " + Arrays.asList(ApplicationType.values()) + " (optional)"), - parameterWithName("name").description("The name of the application to register"), - parameterWithName("version").description("The version of the application to register") - ), - requestParameters( - parameterWithName("uri").description("URI where the application bits reside"), - parameterWithName("metadata-uri").optional() - .description("URI where the application metadata jar can be found"), - parameterWithName("force").optional() - .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); - - unregisterApp(ApplicationType.source, "http", "1.1.0.RELEASE"); - } + @Test + public void appDefault() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); + + this.mockMvc.perform(RestDocumentationRequestBuilders + .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.2.0.RELEASE") + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isAccepted()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application"), + parameterWithName("version").description("The version of the application") + ) + ) + ); + unregisterApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + unregisterApp(ApplicationType.source, "http", "1.3.0.RELEASE"); + } + + @Test + public void registeringAnApplicationVersion() throws Exception { + this.mockMvc.perform( + post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.1.0.RELEASE") + .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") + .queryParam("bootVersion", "2")) + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type") + .description("The type of application to register. One of " + Arrays.asList(ApplicationType.values()) + " (optional)"), + parameterWithName("name").description("The name of the application to register"), + parameterWithName("version").description("The version of the application to register") + ), + requestParameters( + parameterWithName("uri").description("URI where the application bits reside"), + parameterWithName("metadata-uri").optional() + .description("URI where the application metadata jar can be found"), + parameterWithName("force").optional() + .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur"), + parameterWithName("bootVersion").optional() + .description("Spring Boot version. Value of 2 or 3. Must be supplied of greater than 2.") + ) + ) + ); + + unregisterApp(ApplicationType.source, "http", "1.1.0.RELEASE"); + } @Test public void bulkRegisteringApps() throws Exception { this.mockMvc.perform( - post("/apps") - .param("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") - .param("force", "false")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - requestParameters( - parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."), - parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."), - parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); + post("/apps") + .param("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") + .param("force", "false")) + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + requestParameters( + parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."), + parameterWithName("apps").optional().description("Inline set of registrations. 
Exclusive with `uri`."), + parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); unregisterApp(ApplicationType.source, "http"); } - @Test - public void getApplicationsFiltered() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "time", "1.2.0.RELEASE"); - this.mockMvc.perform( - get("/apps") - .param("search", "") - .param("type", "source").accept(MediaType.APPLICATION_JSON) - .param("defaultVersion", "true") - .param("page", "0") - .param("size", "10") - .param("sort", "name,ASC") - ) - .andExpect(status().isOk()) - .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("search").description("The search string performed on the name (optional)"), - parameterWithName("type") - .description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("defaultVersion").description("The boolean flag to set to retrieve only the apps of the default versions (optional)"), - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("sort").description("The sort on the list (optional)"), - parameterWithName("size").description("The requested page size (optional)") - ), - responseFields( - subsectionWithPath("_embedded.appRegistrationResourceList") - .description("Contains a collection of application"), - subsectionWithPath("_links.self").description("Link to the applications resource"), - subsectionWithPath("page").description("Pagination properties") - ) - )); - - unregisterApp(ApplicationType.source, "http"); - unregisterApp(ApplicationType.source, "time"); - } + @Test + public void getApplicationsFiltered() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + registerApp(ApplicationType.source, "time", "1.2.0.RELEASE"); + this.mockMvc.perform( + get("/apps") + .param("search", "") + .param("type", "source").accept(MediaType.APPLICATION_JSON) + .param("defaultVersion", "true") + .param("page", "0") + .param("size", "10") + .param("sort", "name,ASC") + ) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + requestParameters( + parameterWithName("search").description("The search string performed on the name (optional)"), + parameterWithName("type") + .description("Restrict the returned apps to the type of the app. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("defaultVersion").description("The boolean flag to set to retrieve only the apps of the default versions (optional)"), + parameterWithName("page").description("The zero-based page number (optional)"), + parameterWithName("sort").description("The sort on the list (optional)"), + parameterWithName("size").description("The requested page size (optional)") + ), + responseFields( + subsectionWithPath("_embedded.appRegistrationResourceList") + .description("Contains a collection of application"), + subsectionWithPath("_links.self").description("Link to the applications resource"), + subsectionWithPath("page").description("Pagination properties") + ) + )); + + unregisterApp(ApplicationType.source, "http"); + unregisterApp(ApplicationType.source, "time"); + } @Test public void getSingleApplication() throws Exception { registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); this.mockMvc.perform( - get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON) - .param("exhaustive", "false")) - .andExpect(status().isOk()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to query") - ), - requestParameters( - parameterWithName("exhaustive").optional() - .description("Return all application properties, including common Spring Boot properties") - ), - responseFields( - fieldWithPath("name").description("The name of the application"), - fieldWithPath("label").description("The label name of the application"), - fieldWithPath("type").description("The type of the application. One of " + Arrays.asList(ApplicationType.values())), - fieldWithPath("uri").description("The uri of the application"), - fieldWithPath("version").description("The version of the application"), - fieldWithPath("versions").description("All the registered versions of the application"), - fieldWithPath("defaultVersion").description("If true, the application is the default version"), - subsectionWithPath("options").description("The options of the application (Array)"), - fieldWithPath("shortDescription").description("The description of the application"), - fieldWithPath("inboundPortNames").description("Inbound port names of the application"), - fieldWithPath("outboundPortNames").description("Outbound port names of the application"), - fieldWithPath("optionGroups").description("Option groups of the application") - ) - ) - ); + get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON) + .param("exhaustive", "false")) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to query") + ), + requestParameters( + parameterWithName("exhaustive").optional() + .description("Return all application properties, including common Spring Boot properties") + ), + responseFields( + fieldWithPath("name").description("The name of the application"), + fieldWithPath("label").description("The label name of the application"), + fieldWithPath("type").description("The type of the application. 
One of " + Arrays.asList(ApplicationType.values())), + fieldWithPath("uri").description("The uri of the application"), + fieldWithPath("version").description("The version of the application"), + fieldWithPath("versions").description("All the registered versions of the application"), + fieldWithPath("defaultVersion").description("If true, the application is the default version"), + fieldWithPath("bootVersion").description("The version of Spring Boot the application targets (2, 3)"), + subsectionWithPath("options").description("The options of the application (Array)"), + fieldWithPath("shortDescription").description("The description of the application"), + fieldWithPath("inboundPortNames").description("Inbound port names of the application"), + fieldWithPath("outboundPortNames").description("Outbound port names of the application"), + fieldWithPath("optionGroups").description("Option groups of the application") + ) + ) + ); unregisterApp(ApplicationType.source, "http"); } - @Test - public void registeringAnApplication() throws Exception { - this.mockMvc.perform( - post("/apps/{type}/{name}", ApplicationType.source, "http") - .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to register") - ), - requestParameters( - parameterWithName("uri").description("URI where the application bits reside"), - parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"), - parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); - - unregisterApp(ApplicationType.source, "http"); - } - - @Test - public void unregisteringAnApplication() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - - this.mockMvc.perform( - delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "1.2.0.RELEASE")) - .andExpect(status().isOk()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to unregister. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to unregister"), - parameterWithName("version").description("The version of the application to unregister (optional)") - ) + @Test + public void registeringAnApplication() throws Exception { + this.mockMvc.perform( + post("/apps/{type}/{name}", ApplicationType.source, "http") + .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") + .queryParam("bootVersion", "2") ) - ); - } - - @Test - public void unregisteringAllApplications() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - this.mockMvc.perform( - delete("/apps")) - .andExpect(status().isOk() - ); - } + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to register. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to register") + ), + requestParameters( + parameterWithName("uri").description("URI where the application bits reside"), + parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"), + parameterWithName("bootVersion").optional().description("The Spring Boot version of the application.Default is 2"), + parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); + + unregisterApp(ApplicationType.source, "http"); + } + + @Test + public void unregisteringAnApplication() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + + this.mockMvc.perform( + delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "1.2.0.RELEASE")) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to unregister. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to unregister"), + parameterWithName("version").description("The version of the application to unregister (optional)") + ) + ) + ); + } + + @Test + public void unregisteringAllApplications() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); + this.mockMvc.perform( + delete("/apps")) + .andExpect(status().isOk() + ); + } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java index a9f2c3cbc7..436dc7d02b 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java @@ -16,10 +16,10 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; @@ -36,18 +36,14 @@ * Documentation for the {@code /audit-records} endpoint. 
* * @author Gunnar Hillert + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class AuditRecordsDocumentation extends BaseDocumentation { - private static boolean setUpIsDone = false; - - @Before + @BeforeEach public void setup() throws Exception { - if (setUpIsDone) { - return; - } - this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE")) @@ -62,7 +58,6 @@ public void setup() throws Exception { .param("definition", "time --format='YYYY MM DD' | log") .param("deploy", "false")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test @@ -76,7 +71,6 @@ public void listAllAuditRecords() throws Exception { .param("fromDate", "2000-01-01T00:00:00") .param("toDate", "2099-01-01T00:00:00") ) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( @@ -100,7 +94,6 @@ public void listAllAuditRecords() throws Exception { public void getAuditRecord() throws Exception { this.mockMvc.perform( get("/audit-records/{id}", "5")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( @@ -124,7 +117,6 @@ public void getAuditRecord() throws Exception { public void getAuditActionTypes() throws Exception { this.mockMvc.perform( get("/audit-records/audit-action-types")) - .andDo(print()) .andExpect(status().isOk() ); } @@ -133,7 +125,6 @@ public void getAuditActionTypes() throws Exception { public void getAuditOperationTypes() throws Exception { this.mockMvc.perform( get("/audit-records/audit-operation-types")) - .andDo(print()) .andExpect(status().isOk() ); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java index 76318492e5..ad70a346e5 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -25,17 +26,25 @@ import javax.sql.DataSource; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import org.mockito.ArgumentMatchers; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskPlatform; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.single.LocalDataflowResource; +import org.springframework.cloud.deployer.spi.app.ActuatorOperations; import org.springframework.cloud.deployer.spi.app.AppDeployer; import org.springframework.cloud.deployer.spi.scheduler.ScheduleInfo; import org.springframework.cloud.deployer.spi.scheduler.ScheduleRequest; @@ -47,9 +56,13 @@ import org.springframework.cloud.skipper.domain.Status; import org.springframework.cloud.skipper.domain.StatusCode; import org.springframework.cloud.skipper.domain.VersionInfo; +import org.springframework.cloud.task.repository.support.DatabaseType; +import org.springframework.context.ApplicationContext; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; -import org.springframework.restdocs.JUnitRestDocumentation; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.restdocs.RestDocumentationContextProvider; +import org.springframework.restdocs.RestDocumentationExtension; import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.test.web.servlet.MockMvc; @@ -73,16 +86,19 @@ * @author Gunnar Hillert * @author Ilayaperumal Gopinathan * @author Glenn Renfro + * @author Corneil du Plessis */ +@ExtendWith(RestDocumentationExtension.class) public abstract class BaseDocumentation { private static String skipperServerPort; - @ClassRule + @RegisterExtension public final static LocalDataflowResource springDataflowServer = new LocalDataflowResource( - "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort); - @Before - public void setupMocks() throws Exception{ + "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort); + + @BeforeEach + public void setupMocks(RestDocumentationContextProvider restDocumentationContextProvider) throws Exception { reset(springDataflowServer.getSkipperClient()); AboutResource about = new AboutResource(); @@ -98,19 +114,17 @@ public void setupMocks() 
throws Exception{ info.getStatus().setStatusCode(StatusCode.UNKNOWN); when(springDataflowServer.getSkipperClient().status(ArgumentMatchers.anyString())).thenReturn(info); - Deployer deployer = new Deployer("default", "local", mock(AppDeployer.class)); - when(springDataflowServer.getSkipperClient().listDeployers()).thenReturn(Arrays.asList(deployer)); + Deployer deployer = new Deployer("default", "local", mock(AppDeployer.class), mock(ActuatorOperations.class)); + when(springDataflowServer.getSkipperClient().listDeployers()).thenReturn(Collections.singletonList(deployer)); when(springDataflowServer.getSkipperClient().search(ArgumentMatchers.anyString(), ArgumentMatchers.anyBoolean())).thenReturn(new ArrayList<>()); - this.prepareDocumentationTests(springDataflowServer.getWebApplicationContext()); + this.prepareDocumentationTests(springDataflowServer.getWebApplicationContext(), + restDocumentationContextProvider); } public static final String TARGET_DIRECTORY = "target/generated-snippets"; - @Rule - public JUnitRestDocumentation restDocumentation = new JUnitRestDocumentation(TARGET_DIRECTORY); - protected MockMvc mockMvc; protected RestDocumentationResultHandler documentationHandler; @@ -119,26 +133,32 @@ public void setupMocks() throws Exception{ protected DataSource dataSource; - protected void prepareDocumentationTests(WebApplicationContext context) throws Exception{ + protected ApplicationContext context; + + protected void prepareDocumentationTests(WebApplicationContext context, + RestDocumentationContextProvider restDocumentationContextProvider) throws Exception { + this.context = context; this.documentationHandler = document("{class-name}/{method-name}", preprocessResponse(prettyPrint())); this.documentation = new ToggleableResultHandler(documentationHandler); this.mockMvc = MockMvcBuilders.webAppContextSetup(context) - .apply(documentationConfiguration(this.restDocumentation).uris().withPort(9393)) - .alwaysDo((ToggleableResultHandler)this.documentation).build(); + .apply(documentationConfiguration(restDocumentationContextProvider).uris().withPort(9393)) + .alwaysDo((ToggleableResultHandler) this.documentation).build(); this.dataSource = springDataflowServer.getWebApplicationContext().getBean(DataSource.class); - TaskSchedulerController controller = this.springDataflowServer.getWebApplicationContext().getBean(TaskSchedulerController.class); + TaskSchedulerController controller = springDataflowServer.getWebApplicationContext() + .getBean(TaskSchedulerController.class); ReflectionTestUtils.setField(controller, "schedulerService", schedulerService()); - TaskPlatform taskPlatform = this.springDataflowServer.getWebApplicationContext().getBean(TaskPlatform.class); + TaskPlatform taskPlatform = springDataflowServer.getWebApplicationContext().getBean(TaskPlatform.class); Launcher launcher = taskPlatform.getLaunchers().stream().filter(launcherToFilter -> launcherToFilter.getName().equals("default")).findFirst().get(); ReflectionTestUtils.setField(launcher, "scheduler", localTestScheduler()); } /** * Can be used by subclasses to easily register dummy apps, as most endpoints require apps to be effective - * @param type the type of app to register - * @param name the name of the app to register + * + * @param type the type of app to register + * @param name the name of the app to register * @param version the version to register */ void registerApp(ApplicationType type, String name, String version) throws Exception { @@ -147,53 +167,76 @@ void registerApp(ApplicationType type, String name, 
String version) throws Excep documentation.dontDocument( () -> this.mockMvc.perform( - post(String.format("/apps/%s/%s/%s", type, name, version)) - .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version))) + post(String.format("/apps/%s/%s/%s", type, name, version)) + .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version))) .andExpect(status().isCreated()) ); } void unregisterApp(ApplicationType type, String name) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete(String.format("/apps/%s/%s", type, name)) - ) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete(String.format("/apps/%s/%s", type, name)) + ) + .andExpect(status().isOk()) ); } void unregisterApp(ApplicationType type, String name, String version) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete(String.format("/apps/%s/%s/%s", type, name, version)) - ) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete(String.format("/apps/%s/%s/%s", type, name, version)) + ) + .andExpect(status().isOk()) ); } - void createStream(String name, String definition, boolean deploy) throws Exception{ + void createStream(String name, String definition, boolean deploy) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - post("/streams/definitions") - .param("name", name) - .param("definition", definition) - .param("deploy", String.valueOf(deploy))) - .andExpect(status().isCreated()) + () -> this.mockMvc.perform( + post("/streams/definitions") + .param("name", name) + .param("definition", definition) + .param("deploy", String.valueOf(deploy))) + .andExpect(status().isCreated()) ); } - void destroyStream(String name) throws Exception{ + void destroyStream(String name) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete("/streams/definitions/{name}", name)) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete("/streams/definitions/{name}", name)) + .andExpect(status().isOk()) ); } + protected DataflowTaskExecutionMetadataDaoContainer createDataFlowTaskExecutionMetadataDaoContainer(SchemaService schemaService) { + DataflowTaskExecutionMetadataDaoContainer result = new DataflowTaskExecutionMetadataDaoContainer(); + MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao( + dataSource, + incrementerFactory.getIncrementer(databaseType, + SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix()) + ), + target.getTaskPrefix() + ); + result.add(target.getName(), dao); + } + return result; + } + /** * A {@link ResultHandler} that can be turned off and on. * * @author Eric Bottard + * @author Corneil du Plessis */ private static class ToggleableResultHandler implements ResultHandler, RestDocs { private final ResultHandler delegate; @@ -230,6 +273,7 @@ public void dontDocument(Callable action) throws Exception { * are not documented. 
* * @author Eric Bottard + * @author Corneil du Plessis */ @FunctionalInterface public interface RestDocs { @@ -240,8 +284,8 @@ public SchedulerService schedulerService() { return new SchedulerService() { @Override public void schedule(String scheduleName, String taskDefinitionName, - Map taskProperties, List commandLineArgs, - String platformName) { + Map taskProperties, List commandLineArgs, + String platformName) { } @Override @@ -264,7 +308,7 @@ public void unscheduleForTaskDefinition(String taskDefinitionName) { @Override public List list(Pageable pageable, String taskDefinitionName, - String platformName) { + String platformName) { return null; } @@ -345,4 +389,4 @@ public List list() { } }; } - } +} diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java index ee3257a325..2c516f621d 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,35 +21,33 @@ import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import 
org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.http.MediaType; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; +import static org.assertj.core.api.Assertions.assertThat; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.put; @@ -62,28 +60,37 @@ import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + /** * Documentation for the /jobs/executions endpoint. * * @author Glenn Renfro + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) +@SuppressWarnings("NewClassNamingConvention") +@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class}) @DirtiesContext public class JobExecutionsDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; - private static boolean initialized; - private JobRepository jobRepository; - private TaskExecutionDao dao; - private TaskBatchDao taskBatchDao; + private JobRepositoryContainer jobRepositoryContainer; + + private TaskExecutionDaoContainer daoContainer; + + private TaskBatchDaoContainer taskBatchDaoContainer; + private JdbcTemplate jdbcTemplate; + private DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer; + + private AggregateExecutionSupport aggregateExecutionSupport; + + private TaskDefinitionReader taskDefinitionReader; - @Before + + @BeforeEach public void setup() throws Exception { - if (!initialized) { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); initialize(); createJobExecution(JOB_NAME, BatchStatus.STARTED); @@ -98,263 +105,272 @@ public void setup() throws Exception { 1, 1, "2", JOB_NAME + "_1", "default", new Date()); documentation.dontDocument(() -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", "DOCJOB1") - .param("definition", "timestamp --format='YYYY MM DD'")) + post("/tasks/definitions") + .param("name", "DOCJOB1") + .param("definition", "timestamp --format='YYYY MM DD'")) .andExpect(status().isOk())); - - initialized = true; - } } @Test public void listJobExecutions() throws Exception { this.mockMvc.perform( - get("/jobs/executions") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/executions") + .param("page", "0") + .param("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.jobExecutionResourceList") - .description("Contains a collection of Job Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - 
subsectionWithPath("page").description("Pagination properties") + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)")), + responseFields( + subsectionWithPath("_embedded.jobExecutionResourceList") + .description("Contains a collection of Job Executions/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") ))); } @Test public void listThinJobExecutions() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listThinJobExecutionsByJobInstanceId() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("jobInstanceId", "1")) - .andDo(print()) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10") + .param("jobInstanceId", "1")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("jobInstanceId") - .description("Filter result by the job instance id")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("jobInstanceId") + .description("Filter result by the job instance id")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } 
@Test public void listThinJobExecutionsByTaskExecutionId() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("taskExecutionId", "1")) - .andDo(print()) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10") + .param("taskExecutionId", "1")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("taskExecutionId") - .description("Filter result by the task execution id")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("taskExecutionId") + .description("Filter result by the task execution id")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listThinJobExecutionsByDate() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("fromDate", "2000-09-24T17:00:45,000") - .param("toDate", "2050-09-24T18:00:45,000")) - .andDo(print()) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10") + .param("fromDate", "2000-09-24T17:00:45,000") + .param("toDate", "2050-09-24T18:00:45,000")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("fromDate") - .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"), - parameterWithName("toDate") - .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("fromDate") + .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"), + parameterWithName("toDate") + .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions 
included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listJobExecutionsByName() throws Exception { this.mockMvc.perform( - get("/jobs/executions") - .param("name", JOB_NAME) - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/executions") + .param("name", JOB_NAME) + .param("page", "0") + .param("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the job execution")), - responseFields( - subsectionWithPath("_embedded.jobExecutionResourceList") - .description("Contains a collection of Job Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("name") + .description("The name associated with the job execution")), + responseFields( + subsectionWithPath("_embedded.jobExecutionResourceList") + .description("Contains a collection of Job Executions/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") ))); } @Test public void listThinJobExecutionsByName() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("name", JOB_NAME) - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/thinexecutions") + .param("name", JOB_NAME) + .param("page", "0") + .param("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the job execution")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("name") + .description("The name associated with the job execution")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void jobDisplayDetail() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}", "2")) - .andDo(print()) - .andExpect(status().isOk()) - .andDo(this.documentationHandler.document( - pathParameters( - 
parameterWithName("id").description("The id of an existing job execution (required)") - ), - responseFields( - fieldWithPath("executionId").description("The execution ID of the job execution"), - fieldWithPath("stepExecutionCount").description("the number of step of the job execution"), - fieldWithPath("jobId").description("The job ID of the job execution"), - fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"), - fieldWithPath("name").description("The name of the job execution"), - fieldWithPath("startDate").description("The start date of the job execution"), - fieldWithPath("startTime").description("The start time of the job execution"), - fieldWithPath("duration").description("The duration of the job execution"), - fieldWithPath("jobParameters").description("The parameters of the job execution"), - fieldWithPath("jobParametersString").description("The parameters string of the job execution"), - fieldWithPath("restartable").description("The status restartable of the job execution"), - fieldWithPath("abandonable").description("The status abandonable of the job execution"), - fieldWithPath("stoppable").description("The status stoppable of the job execution"), - fieldWithPath("defined").description("The status defined of the job execution"), - fieldWithPath("timeZone").description("The time zone of the job execution"), - subsectionWithPath("jobExecution").description("The details of the job execution"), - subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"), - subsectionWithPath("_links.self").description("Link to the stream definition resource") + get("/jobs/executions/{id}", "2") + .queryParam("schemaTarget", "boot2") ) - )); + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution (required)") + ), + requestParameters( + parameterWithName("schemaTarget").description("Schema Target to the Job.").optional() + ), + responseFields( + fieldWithPath("executionId").description("The execution ID of the job execution"), + fieldWithPath("stepExecutionCount").description("the number of step of the job execution"), + fieldWithPath("jobId").description("The job ID of the job execution"), + fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"), + fieldWithPath("name").description("The name of the job execution"), + fieldWithPath("startDate").description("The start date of the job execution"), + fieldWithPath("startTime").description("The start time of the job execution"), + fieldWithPath("duration").description("The duration of the job execution"), + fieldWithPath("jobParameters").description("The parameters of the job execution"), + fieldWithPath("jobParametersString").description("The parameters string of the job execution"), + fieldWithPath("restartable").description("The status restartable of the job execution"), + fieldWithPath("abandonable").description("The status abandonable of the job execution"), + fieldWithPath("stoppable").description("The status stoppable of the job execution"), + fieldWithPath("defined").description("The status defined of the job execution"), + fieldWithPath("timeZone").description("The time zone of the job execution"), + fieldWithPath("schemaTarget").description("The schema target of the job execution"), + subsectionWithPath("jobExecution").description("The details of the job execution"), + 
subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"), + subsectionWithPath("_links.self").description("Link to the stream definition resource"), + subsectionWithPath("_links.stop").description("Link to stopping the job"), + subsectionWithPath("_links.restart").description("Link to restarting the job") + ) + )); } @Test public void jobStop() throws Exception { - this.mockMvc.perform(put("/jobs/executions/{id}", "1").accept(MediaType.APPLICATION_JSON).param("stop", "true")) - .andDo(print()) + this.mockMvc.perform(put("/jobs/executions/{id}", "1") + .param("stop", "true") + .queryParam("schemaTarget", "boot2") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("id") .description("The id of an existing job execution (required)")) , requestParameters( + parameterWithName("schemaTarget").description("The schema target of the job execution").optional(), parameterWithName("stop") .description("Sends signal to stop the job if set to true")))); } @Test public void jobRestart() throws Exception { - this.mockMvc.perform(put("/jobs/executions/{id}", "2").accept(MediaType.APPLICATION_JSON).param("restart", "true")) - .andDo(print()) + this.mockMvc.perform(put("/jobs/executions/{id}", "2") + .param("restart", "true") + .queryParam("schemaTarget", "boot2") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - pathParameters(parameterWithName("id") - .description("The id of an existing job execution (required)")) - , requestParameters( - parameterWithName("restart") - .description("Sends signal to restart the job if set to true")))); + pathParameters(parameterWithName("id") + .description("The id of an existing job execution (required)")) + , requestParameters( + parameterWithName("schemaTarget").description("The schema target of the job execution").optional(), + parameterWithName("restart") + .description("Sends signal to restart the job if set to true") + ) + ) + ); } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.daoContainer = context.getBean(TaskExecutionDaoContainer.class); + this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class); + this.jobRepositoryContainer = context.getBean(JobRepositoryContainer.class); + this.dataflowTaskExecutionMetadataDaoContainer = context.getBean(DataflowTaskExecutionMetadataDaoContainer.class); + this.aggregateExecutionSupport = context.getBean(AggregateExecutionSupport.class); + this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class); + } private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null); + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader); + TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName()); + TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), 
Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null); Map jobParameterMap = new HashMap<>(); JobParameters jobParameters = new JobParameters(jobParameterMap); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName()); + JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null); + TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); - TaskManifest manifest = new TaskManifest(); + jobRepository.update(jobExecution); + final TaskManifest manifest = new TaskManifest(); manifest.setPlatformName("default"); - DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - - DataflowTaskExecutionMetadataDao metadataDao = new JdbcDataflowTaskExecutionMetadataDao( - dataSource, incrementerFactory.getIncrementer("h2", "task_execution_metadata_seq")); + DataflowTaskExecutionMetadataDao metadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName()); + assertThat(metadataDao).isNotNull(); TaskManifest taskManifest = new TaskManifest(); taskManifest.setPlatformName("default"); metadataDao.save(taskExecution, taskManifest); diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java index c789368bc3..44570ff279 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,26 +19,26 @@ import java.util.ArrayList; import java.util.Date; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; @@ -54,27 +54,26 @@ * Documentation for the /jobs/instances endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) +@SuppressWarnings({"NewClassNamingConvention", "SameParameterValue"}) @SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) @DirtiesContext public class JobInstancesDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; - private static boolean initialized; - private JobRepository jobRepository; - private TaskExecutionDao dao; - private TaskBatchDao taskBatchDao; + private JobRepositoryContainer jobRepositoryContainer; + private TaskExecutionDaoContainer daoContainer; + private TaskBatchDaoContainer taskBatchDaoContainer; + private AggregateExecutionSupport aggregateExecutionSupport; + private TaskDefinitionReader taskDefinitionReader; - @Before + @BeforeEach public void setup() throws Exception { - if (!initialized) { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); - initialize(); - createJobExecution(JOB_NAME, BatchStatus.STARTED); - initialized = true; - } + registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); + initialize(); + createJobExecution(JOB_NAME, BatchStatus.STARTED); } @Test @@ -84,7 +83,6 @@ public void listJobInstances() throws Exception { .param("name", JOB_NAME) .param("page", "0") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( requestParameters( parameterWithName("page") @@ -103,13 +101,15 @@ public void listJobInstances() throws Exception { @Test public void jobDisplayDetail() throws Exception { this.mockMvc.perform( - get("/jobs/instances/{id}", "1")) - .andDo(print()) + get("/jobs/instances/{id}", "1").queryParam("schemaTarget", "boot2")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( parameterWithName("id").description("The id of an existing job instance (required)") ), + requestParameters( + parameterWithName("schemaTarget").description("Schema target").optional() + ), responseFields( fieldWithPath("jobName").description("The name of the job instance"), fieldWithPath("jobInstanceId").description("The ID of the job instance"), @@ -120,21 +120,24 @@ public void jobDisplayDetail() throws Exception { } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class); + this.aggregateExecutionSupport = context.getBean(AggregateExecutionSupport.class); + this.jobRepositoryContainer = context.getBean(JobRepositoryContainer.class); + this.daoContainer = context.getBean(TaskExecutionDaoContainer.class); + this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class); } private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + SchemaVersionTarget 
schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader); + TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName()); + TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); + JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName()); + JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); + TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); + jobRepository.update(jobExecution); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java index 54b2a37e15..f6d42f0e00 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,27 +19,27 @@ import java.util.ArrayList; import java.util.Date; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; 
-import org.springframework.test.context.junit4.SpringRunner;

 import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
 import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
@@ -55,118 +55,125 @@
 * Documentation for the /jobs/executions/{id}/steps endpoint.
 *
 * @author Glenn Renfro
+ * @author Corneil du Plessis
 */
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class })
+@SuppressWarnings({"NewClassNamingConvention", "SameParameterValue"})
+@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class})
 @DirtiesContext
 public class JobStepExecutionsDocumentation extends BaseDocumentation {

 	private final static String JOB_NAME = "DOCJOB";

-	private static boolean initialized;
-	private JobRepository jobRepository;
-	private TaskExecutionDao dao;
-	private TaskBatchDao taskBatchDao;
+	private JobRepositoryContainer jobRepositoryContainer;
+	private TaskExecutionDaoContainer daoContainer;
+	private TaskBatchDaoContainer taskBatchDaoContainer;
+	private AggregateExecutionSupport aggregateExecutionSupport;
+	private TaskDefinitionReader taskDefinitionReader;

-	@Before
+	@BeforeEach
 	public void setup() throws Exception {
-		if (!initialized) {
-			registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
-			initialize();
-			createJobExecution(JOB_NAME, BatchStatus.STARTED);
-
-			documentation.dontDocument(() -> this.mockMvc.perform(
-					post("/tasks/definitions")
-							.param("name", "DOCJOB1")
-							.param("definition", "timestamp --format='YYYY MM DD'"))
-					.andExpect(status().isOk()));
-
-			initialized = true;
-		}
+		registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+		initialize();
+		createJobExecution(JOB_NAME, BatchStatus.STARTED);
+
+		documentation.dontDocument(() -> this.mockMvc.perform(
+				post("/tasks/definitions")
+						.param("name", "DOCJOB1")
+						.param("definition", "timestamp --format='YYYY MM DD'"))
+				.andExpect(status().isOk()));
 	}

 	@Test
 	public void listStepExecutionsForJob() throws Exception {
 		this.mockMvc.perform(
-				get("/jobs/executions/{id}/steps", "1")
-						.param("page", "0")
-						.param("size", "10"))
+			get("/jobs/executions/{id}/steps", "1")
+				.param("page", "0")
+				.param("size", "10"))
 			.andExpect(status().isOk()).andDo(this.documentationHandler.document(
-				requestParameters(
-						parameterWithName("page")
-								.description("The zero-based page number (optional)"),
-						parameterWithName("size")
-								.description("The requested page size (optional)")),
-				pathParameters(parameterWithName("id")
-						.description("The id of an existing job execution (required)")),
-				responseFields(
-						subsectionWithPath("_embedded.stepExecutionResourceList")
-								.description("Contains a collection of Step Executions/"),
-						subsectionWithPath("_links.self").description("Link to the job execution resource"),
-						subsectionWithPath("page").description("Pagination properties"))));
+				requestParameters(
+					parameterWithName("page")
+						.description("The zero-based page number (optional)"),
+					parameterWithName("size")
+						.description("The requested page size (optional)")),
+				pathParameters(parameterWithName("id")
+					.description("The id of an existing job execution (required)")),
+				responseFields(
+					subsectionWithPath("_embedded.stepExecutionResourceList")
+						.description("Contains a collection of Step Executions"),
+					subsectionWithPath("_links.self").description("Link to the job execution resource"),
+					subsectionWithPath("page").description("Pagination properties"))));
 	}

 	@Test
 	public void stepDetail() throws Exception {
this.mockMvc.perform( - get("/jobs/executions/{id}/steps/{stepid}", "1", "1")) - .andExpect(status().isOk()).andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)"), - parameterWithName("stepid") - .description("The id of an existing step execution for a specific job execution (required)") - ), - responseFields( - fieldWithPath("jobExecutionId").description("The ID of the job step execution"), - fieldWithPath("stepType").description("The type of the job step execution"), - subsectionWithPath("stepExecution").description("The step details of the job step execution"), - subsectionWithPath("_links.self").description("Link to the job step execution resource") - ) - )); + get("/jobs/executions/{id}/steps/{stepid}", "1", "1").queryParam("schemaTarget", "boot2")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution (required)"), + parameterWithName("stepid") + .description("The id of an existing step execution for a specific job execution (required)") + ), + requestParameters( + parameterWithName("schemaTarget").description("Schema target").optional() + ), + responseFields( + fieldWithPath("jobExecutionId").description("The ID of the job step execution"), + fieldWithPath("stepType").description("The type of the job step execution"), + fieldWithPath("schemaTarget").description("The schema target name of the job and task state data"), + subsectionWithPath("stepExecution").description("The step details of the job step execution"), + subsectionWithPath("_links.self").description("Link to the job step execution resource"), + subsectionWithPath("_links.progress").description("Link to retrieve the progress") + ) + )); } @Test public void stepProgress() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1")) - .andExpect(status().isOk()).andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)"), - parameterWithName("stepid") - .description("The id of an existing step execution for a specific job execution (required)") - ), - responseFields( - subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"), - subsectionWithPath("stepExecutionHistory") - .description("The history of the job step execution"), - fieldWithPath("percentageComplete").description("The percentage complete of the job step execution"), - fieldWithPath("finished").description("The status finished of the job step execution"), - fieldWithPath("duration").description("The duration of the job step execution"), - subsectionWithPath("_links.self").description("Link to the job step execution resource") - ) - )); + get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution (required)"), + parameterWithName("stepid") + .description("The id of an existing step execution for a specific job execution (required)") + ), + responseFields( + subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"), + subsectionWithPath("stepExecutionHistory") + .description("The history of the job step execution"), + fieldWithPath("percentageComplete").description("The 
percentage complete of the job step execution"), + fieldWithPath("finished").description("The status finished of the job step execution"), + fieldWithPath("duration").description("The duration of the job step execution"), + subsectionWithPath("_links.self").description("Link to the job step execution resource"), + subsectionWithPath("_links.progress").description("Link to the job step progress") + ) + )); } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.aggregateExecutionSupport = context.getBean(AggregateExecutionSupport.class); + this.jobRepositoryContainer = context.getBean(JobRepositoryContainer.class); + this.daoContainer = context.getBean(TaskExecutionDaoContainer.class); + this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class); + this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class); } private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader); + TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName()); + TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); + JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName()); + JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); StepExecution stepExecution = new StepExecution(name + "_STEP", jobExecution, jobExecution.getId()); stepExecution.setId(null); jobRepository.add(stepExecution); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(schemaVersionTarget.getName()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); + jobRepository.update(jobExecution); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java index 8a40bae482..a45f04bb5d 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java @@ -19,9 +19,9 @@ import java.util.ArrayList; import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.skipper.domain.Info; @@ -41,18 +41,20 @@ * * @author Eric Bottard * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ +@SuppressWarnings("NewClassNamingConvention") @DirtiesContext public class RuntimeAppsDocumentation extends BaseDocumentation { - @Before + @BeforeEach public void setup() throws Exception { registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); registerApp(ApplicationType.sink, "log", "1.2.0.RELEASE"); createStream("mystream", "http | log", true); } - @After + @AfterEach public void cleanup() throws Exception { destroyStream("mystream"); unregisterApp(ApplicationType.source, "http"); diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java index cd985e1669..d78f1aae9f 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java @@ -16,8 +16,8 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.http.MediaType; @@ -26,8 +26,10 @@ /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -@Ignore +@SuppressWarnings("NewClassNamingConvention") +@Disabled public class RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation extends BaseDocumentation { @Test diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/SchemaDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/SchemaDocumentation.java new file mode 100644 index 0000000000..a7198a0f3b --- /dev/null +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/SchemaDocumentation.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.rest.documentation; + +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; + +import org.springframework.http.MediaType; +import org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders; + +import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; +import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +/** + * Creates asciidoc snippets for endpoints exposed by {@literal SchemaController}. + + * @author Corneil du Plessis + */ +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) +public class SchemaDocumentation extends BaseDocumentation { + + @Test + public void schemaVersions() throws Exception { + + this.mockMvc.perform(RestDocumentationRequestBuilders + .get("/schema/versions").accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + responseFields( + fieldWithPath("defaultSchemaVersion").description("The default version used when registering without a bootVersion"), + fieldWithPath("versions").description("The list of versions supported") + ) + ) + ); + } + + + @Test + public void schemaTargets() throws Exception { + + this.mockMvc.perform(RestDocumentationRequestBuilders + .get("/schema/targets").accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document()); + } +} diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java index a90d004051..6eec916acb 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java @@ -17,11 +17,12 @@ package org.springframework.cloud.dataflow.server.rest.documentation; import java.util.Arrays; +import java.util.concurrent.Callable; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -42,19 +43,14 @@ * * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class StreamDefinitionsDocumentation extends BaseDocumentation { - private static boolean setUpIsDone = false; - - @Before + @BeforeEach public void setup() throws Exception { - if (setUpIsDone) { - return; - } - - this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE")) @@ -63,7 +59,6 @@ public void setup() throws Exception { post("/apps/{type}/log", "sink") .param("uri", 
"maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test @@ -97,13 +92,13 @@ public void createDefinition() throws Exception { @Test public void listAllStreamDefinitions() throws Exception { + createStream("timelog", "time --format='YYYY MM DD' | log", false); this.mockMvc.perform( get("/streams/definitions") .param("page", "0") .param("sort", "name,ASC") .param("search", "") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( @@ -120,9 +115,9 @@ public void listAllStreamDefinitions() throws Exception { @Test public void getStreamDefinition() throws Exception { + createStream("timelog", "time --format='YYYY MM DD' | log", false); this.mockMvc.perform( get("/streams/definitions/{name}", "timelog")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( @@ -145,7 +140,6 @@ public void getStreamApplications() throws Exception { createStream("mysamplestream", "time | log", false); this.mockMvc.perform( get("/streams/definitions/{name}/applications", "mysamplestream")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( @@ -160,6 +154,7 @@ public void getStreamApplications() throws Exception { fieldWithPath("[].uri").description("The uri of the application"), fieldWithPath("[].version").description("The version of the application"), fieldWithPath("[].defaultVersion").description("If true, the application is the default version"), + fieldWithPath("[].bootVersion").description("The version of Spring Boot the application targets (2, 3)"), fieldWithPath("[].versions").description("All the registered versions of the application"), fieldWithPath("[]._links.self.href").description("Link to the application resource") ))); @@ -167,6 +162,7 @@ public void getStreamApplications() throws Exception { @Test public void listRelatedStreamDefinitions() throws Exception { + createStream("timelog", "time --format='YYYY MM DD' | log", false); this.mockMvc.perform( get("/streams/definitions/{name}/related", "timelog") .param("page", "0") @@ -174,7 +170,6 @@ public void listRelatedStreamDefinitions() throws Exception { .param("search", "") .param("size", "10") .param("nested", "true")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( @@ -196,9 +191,9 @@ public void listRelatedStreamDefinitions() throws Exception { @Test public void streamDefinitionDelete1() throws Exception { + createStream("timelog", "time --format='YYYY MM DD' | log", false); this.mockMvc.perform( delete("/streams/definitions/{name}", "timelog")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("name") @@ -210,7 +205,6 @@ public void streamDefinitionDelete1() throws Exception { public void streamDefinitionDeleteAll() throws Exception { this.mockMvc.perform( delete("/streams/definitions")) - .andDo(print()) .andExpect(status().isOk()); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java index e6bdf4afd3..4b83244c3d 100644 --- 
a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,16 +17,16 @@ package org.springframework.cloud.dataflow.server.rest.documentation; import java.io.IOException; -import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.rest.UpdateStreamRequest; import org.springframework.cloud.skipper.domain.PackageIdentifier; @@ -42,25 +42,20 @@ import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * @author Glenn Renfro * @author Ilayaperumal Gopinathan * @author Christian Tzolov + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class StreamDeploymentsDocumentation extends BaseDocumentation { - private static boolean setUpIsDone = false; - - @Before + @BeforeEach public void setup() throws Exception { - if (setUpIsDone) { - return; - } - this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE") @@ -83,7 +78,6 @@ public void setup() throws Exception { .param("definition", "time --format='YYYY MM DD' | log") .param("deploy", "false")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test @@ -93,7 +87,6 @@ public void scale() throws Exception { post("/streams/deployments/scale/{streamName}/{appName}/instances/{count}", "timelog", "log", 1) .contentType(MediaType.APPLICATION_JSON) .content(json)) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document(pathParameters( parameterWithName("streamName") @@ -148,7 +141,6 @@ public void deploy() throws Exception { post("/streams/deployments/{timelog}", "timelog") .contentType(MediaType.APPLICATION_JSON) .content(json)) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("timelog") @@ -163,19 +155,17 @@ public void streamUpdate() throws Exception { post("/streams/deployments/{timelog1}", "timelog1") .contentType(MediaType.APPLICATION_JSON) .content(json)) - .andDo(print()) .andExpect(status().isCreated()) 
 			.andDo(this.documentationHandler.document(
 					pathParameters(parameterWithName("timelog1")
 							.description("The name of an existing stream definition (required)"))
 			));
-		Thread.sleep(30000);
 		UpdateStreamRequest updateStreamRequest = new UpdateStreamRequest();
 		updateStreamRequest.setReleaseName("timelog1");
 		Map<String, String> updateProperties = new HashMap<>();
 		updateProperties.put("app.time.timestamp.format", "YYYYMMDD");
 		updateStreamRequest.setUpdateProperties(updateProperties);
-		final String releaseName = "myLogRelease";
+
 		final PackageIdentifier packageIdentifier = new PackageIdentifier();
 		packageIdentifier.setPackageName("timelog1");
 		packageIdentifier.setPackageVersion("1.0.0");
@@ -186,40 +176,35 @@ public void streamUpdate() throws Exception {
 				post("/streams/deployments/update/{timelog1}", "timelog1")
 						.contentType(MediaType.APPLICATION_JSON)
 						.content(convertObjectToJson(updateStreamRequest)))
-				.andDo(print())
 			.andExpect(status().isCreated())
 			.andDo(this.documentationHandler.document(
 					pathParameters(parameterWithName("timelog1")
 							.description("The name of an existing stream definition (required)"))
 			));
-		Thread.sleep(30000);
 	}

 	@Test
 	public void rollback() throws Exception {
-		RollbackRequest rollbackRequest = new RollbackRequest();
+		final RollbackRequest rollbackRequest = new RollbackRequest();
 		rollbackRequest.setReleaseName("timelog1");
 		this.mockMvc.perform(
 				post("/streams/deployments/rollback/{name}/{version}", "timelog1", 1)
 						.contentType(MediaType.APPLICATION_JSON))
-				.andDo(print())
 			.andExpect(status().isCreated())
 			.andDo(this.documentationHandler.document(
 					pathParameters(parameterWithName("name")
 							.description("The name of an existing stream definition (required)"),
 							parameterWithName("version").description("The version to rollback to"))));
-		Thread.sleep(30000);
 	}

 	@Test
 	public void history() throws Exception {
-		when(this.springDataflowServer.getSkipperClient().history(anyString()))
-				.thenReturn(Arrays.asList(new Release()));
+		when(springDataflowServer.getSkipperClient().history(anyString()))
+				.thenReturn(Collections.singletonList(new Release()));
 		this.mockMvc.perform(
 				get("/streams/deployments/history/{name}", "timelog1")
 						.contentType(MediaType.APPLICATION_JSON))
-				.andDo(print())
 			.andExpect(status().isOk())
 			.andDo(this.documentationHandler.document(
 					pathParameters(parameterWithName("name")
@@ -231,7 +216,6 @@ public void manifest() throws Exception {
 		this.mockMvc.perform(
 				get("/streams/deployments/manifest/{name}/{version}", "timelog1", 1)
 						.contentType(MediaType.APPLICATION_JSON))
-				.andDo(print())
 			.andExpect(status().isOk())
 			.andDo(this.documentationHandler.document(
 					pathParameters(parameterWithName("name")
@@ -244,15 +228,13 @@ public void platformList() throws Exception {
 		this.mockMvc.perform(
 				get("/streams/deployments/platform/list")
 						.contentType(MediaType.APPLICATION_JSON))
-				.andDo(print())
 			.andExpect(status().isOk());
 	}

 	public static String convertObjectToJson(Object object) throws IOException {
 		ObjectMapper mapper = new ObjectMapper();
 		mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
-		String json = mapper.writeValueAsString(object);
-		return json;
+		return mapper.writeValueAsString(object);
 	}
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java
index 5e60efb9ee..423c5ecac3 100644
--- 
a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java @@ -19,9 +19,9 @@ import java.util.HashMap; import java.util.Map; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.skipper.domain.LogInfo; @@ -34,8 +34,10 @@ * Documentation for the {@code /streams/logs} endpoint. * * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class StreamLogsDocumentation extends BaseDocumentation { @Test @@ -48,7 +50,6 @@ public void getLogsByStreamName() throws Exception { when(springDataflowServer.getSkipperClient().getLog("ticktock")).thenReturn(logInfo); this.mockMvc.perform( get("/streams/logs/ticktock")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document()); } @@ -62,7 +63,6 @@ public void getLogsByAppName() throws Exception { when(springDataflowServer.getSkipperClient().getLog("ticktock", "ticktock-log-v1")).thenReturn(logInfo); this.mockMvc.perform( get("/streams/logs/ticktock/ticktock-log-v1")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document()); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java index 6945376216..baead942fe 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java @@ -16,10 +16,10 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; @@ -34,18 +34,14 @@ * Documentation for the /streams/validation endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class StreamValidationDocumentation extends BaseDocumentation { - private static boolean setUpIsDone = false; - - @Before + @BeforeEach public void setup() throws Exception { - if (setUpIsDone) { - return; - } - this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE") @@ -56,7 +52,6 @@ public void setup() throws Exception { .param("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE") .param("force", "true")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java index 44c11c2566..c2ddef0ae7 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -41,17 +41,18 @@ * * @author Eric Bottard * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ - -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class TaskDefinitionsDocumentation extends BaseDocumentation { - @Before + @BeforeEach public void setup() throws Exception { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); } - @After + @AfterEach public void tearDown() throws Exception { unregisterApp(ApplicationType.task, "timestamp"); } @@ -86,6 +87,12 @@ public void createDefinition() throws Exception { @Test public void listAllTaskDefinitions() throws Exception { + documentation.dontDocument(()->this.mockMvc.perform( + post("/tasks/definitions") + .param("name", "my-task") + .param("definition", "timestamp --format='YYYY MM DD'") + .param("description", "Demo task definition for testing")) + .andExpect(status().isOk())); this.mockMvc.perform( get("/tasks/definitions") .param("page", "0") @@ -94,7 +101,6 @@ public void listAllTaskDefinitions() throws Exception { .param("search", "") .param("manifest", "true") ) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( @@ -113,10 +119,15 @@ public void listAllTaskDefinitions() throws Exception { @Test public void displayDetail() throws Exception { + documentation.dontDocument(()->this.mockMvc.perform( + post("/tasks/definitions") + .param("name", "my-task") + .param("definition", "timestamp --format='YYYY MM DD'") + .param("description", "Demo 
task definition for testing")) + .andExpect(status().isOk())); this.mockMvc.perform( get("/tasks/definitions/{my-task}","my-task") .param("manifest", "true")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( @@ -141,10 +152,15 @@ public void displayDetail() throws Exception { @Test public void taskDefinitionDelete() throws Exception { + documentation.dontDocument(()->this.mockMvc.perform( + post("/tasks/definitions") + .param("name", "my-task") + .param("definition", "timestamp --format='YYYY MM DD'") + .param("description", "Demo task definition for testing")) + .andExpect(status().isOk())); this.mockMvc.perform( delete("/tasks/definitions/{my-task}", "my-task") .param("cleanup", "true")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java index 3db635cda1..91925ac91e 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2019 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,13 +16,19 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import java.util.concurrent.atomic.AtomicReference; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.test.web.servlet.MvcResult; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.delete; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; @@ -43,68 +49,107 @@ * @author Glenn Renfro * @author David Turanski * @author Gunnar Hillert + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class TaskExecutionsDocumentation extends BaseDocumentation { - @Before + @BeforeEach public void setup() throws Exception { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); createTaskDefinition("taskA"); createTaskDefinition("taskB"); - + executeTask("taskA"); + executeTask("taskB"); } - @After + + @AfterEach public void tearDown() throws Exception { + cleanupTaskExecutions("taskA"); + cleanupTaskExecutions("taskB"); destroyTaskDefinition("taskA"); destroyTaskDefinition("taskB"); unregisterApp(ApplicationType.task, 
"timestamp"); } + @Test + public void launchTaskBoot3() throws Exception { + this.mockMvc.perform( + post("/tasks/executions/launch") + .param("name", "taskA") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) + .andExpect(status().isCreated()) + .andDo(this.documentationHandler.document( + requestParameters( + parameterWithName("name").description("The name of the task definition to launch"), + parameterWithName("properties") + .description("Application and Deployer properties to use while launching. (optional)"), + parameterWithName("arguments") + .description("Command line arguments to pass to the task. (optional)")), + responseFields( + fieldWithPath("executionId").description("The id of the task execution"), + fieldWithPath("schemaTarget").description("The schema target of the task state data"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").type(fieldWithPath("_links.tasks/logs").ignored().optional()).description("Link to the task execution logs").optional() + ) + ) + ); + } + @Test public void launchTask() throws Exception { this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskA") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + post("/tasks/executions") + .param("name", "taskA") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("name").description("The name of the task definition to launch"), - parameterWithName("properties").optional() - .description("Application and Deployer properties to use while launching"), - parameterWithName("arguments").optional() - .description("Command line arguments to pass to the task")))); + requestParameters( + parameterWithName("name").description("The name of the task definition to launch"), + parameterWithName("properties") + .description("Application and Deployer properties to use while launching. (optional)"), + parameterWithName("arguments") + .description("Command line arguments to pass to the task. 
(optional)") + ) + ) + ); } @Test public void getTaskCurrentCount() throws Exception { this.mockMvc.perform( - get("/tasks/executions/current")) - .andDo(print()) + get("/tasks/executions/current") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - responseFields( - fieldWithPath("[].name").description("The name of the platform instance (account)"), - fieldWithPath("[].type").description("The platform type"), - fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"), - fieldWithPath("[].runningExecutionCount").description("The number of running executions") - ) + responseFields( + fieldWithPath("[].name").description("The name of the platform instance (account)"), + fieldWithPath("[].type").description("The platform type"), + fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"), + fieldWithPath("[].runningExecutionCount").description("The number of running executions") + ) )); } @Test - public void launchTaskDisplayDetail() throws Exception { + public void getTaskDisplayDetail() throws Exception { this.mockMvc.perform( - get("/tasks/executions/{id}", "1")) - .andDo(print()) + get("/tasks/executions/{id}", "1").queryParam("schemaTarget", "boot2") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( parameterWithName("id").description("The id of an existing task execution (required)") ), + requestParameters( + parameterWithName("schemaTarget").description("The schemaTarget provided in Task execution detail") + ), responseFields( fieldWithPath("executionId").description("The id of the task execution"), fieldWithPath("exitCode").description("The exit code of the task execution"), @@ -119,100 +164,215 @@ public void launchTaskDisplayDetail() throws Exception { fieldWithPath("taskExecutionStatus").description("The status of the task execution"), fieldWithPath("parentExecutionId").description("The id of parent task execution, " + "null if task execution does not have parent"), + fieldWithPath("schemaTarget").description("The schema target of the task state data"), fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"), subsectionWithPath("appProperties").description("The application properties of the task execution"), - subsectionWithPath("deploymentProperties").description("The deployment properties of the task exectuion"), subsectionWithPath("deploymentProperties").description("The deployment properties of the task execution"), subsectionWithPath("platformName").description("The platform selected for the task execution"), - subsectionWithPath("_links.self").description("Link to the task execution resource") + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs") ) )); } + @Test + public void getTaskDisplayDetailByExternalId() throws Exception { + final AtomicReference externalExecutionId = new AtomicReference<>(null); + documentation.dontDocument(() -> { + MvcResult mvcResult = this.mockMvc.perform( + get("/tasks/executions") + .param("page", "0") + .param("size", "20")) + .andExpect(status().isOk()).andReturn(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode node = mapper.readTree(mvcResult.getResponse().getContentAsString()); + JsonNode list = node.get("_embedded").get("taskExecutionResourceList"); + JsonNode first = list.get(0); + 
externalExecutionId.set(first.get("externalExecutionId").asText()); + return externalExecutionId.get(); + }); + + this.mockMvc.perform( + get("/tasks/executions/external/{externalExecutionId}", externalExecutionId.get()).queryParam("platform", "default") + ) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("externalExecutionId").description("The external execution id of an existing task execution (required)") + ), + requestParameters( + parameterWithName("platform").description("The name of the platform.") + ), + responseFields( + fieldWithPath("executionId").description("The id of the task execution"), + fieldWithPath("exitCode").description("The exit code of the task execution"), + fieldWithPath("taskName").description("The task name related to the task execution"), + fieldWithPath("startTime").description("The start time of the task execution"), + fieldWithPath("endTime").description("The end time of the task execution"), + fieldWithPath("exitMessage").description("The exit message of the task execution"), + fieldWithPath("arguments").description("The arguments of the task execution"), + fieldWithPath("jobExecutionIds").description("The job execution ids of the task execution"), + fieldWithPath("errorMessage").description("The error message of the task execution"), + fieldWithPath("externalExecutionId").description("The external id of the task execution"), + fieldWithPath("taskExecutionStatus").description("The status of the task execution"), + fieldWithPath("parentExecutionId").description("The id of parent task execution, " + + "null if task execution does not have parent"), + fieldWithPath("schemaTarget").description("The schema target of the task state data"), + fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"), + subsectionWithPath("appProperties").description("The application properties of the task execution"), + subsectionWithPath("deploymentProperties").description("The deployment properties of the task execution"), + subsectionWithPath("platformName").description("The platform selected for the task execution"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs") + ) + )); + } @Test public void listTaskExecutions() throws Exception { - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskB") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .param("name", "taskB") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) + .andExpect(status().isCreated())); this.mockMvc.perform( - get("/tasks/executions") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/tasks/executions") + .param("page", "1") + .param("size", "2")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)") + ), + responseFields( + subsectionWithPath("_embedded.taskExecutionResourceList") + .description("Contains a collection of Task Executions"), + 
subsectionWithPath("_links.self").description("Link to the task execution resource").type(JsonFieldType.OBJECT), + subsectionWithPath("_links.first").description("Link to the first page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("_links.last").description("Link to the last page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("_links.next").description("Link to the next page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("_links.prev").description("Link to the previous page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("page").description("Pagination properties")))); + } + + @Test + public void listTaskThinExecutions() throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .param("name", "taskB") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) + .andExpect(status().isCreated())); + + this.mockMvc.perform( + get("/tasks/thinexecutions") + .param("page", "1") + .param("size", "2")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)") + ), responseFields( - subsectionWithPath("_embedded.taskExecutionResourceList") - .description("Contains a collection of Task Executions/"), - subsectionWithPath("_links.self").description("Link to the task execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + subsectionWithPath("_embedded.taskExecutionThinResourceList") + .description("Contains a collection of thin Task Executions/"), + subsectionWithPath("_links.self").description("Link to the task execution resource").type(JsonFieldType.OBJECT), + subsectionWithPath("_links.first").description("Link to the first page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("_links.last").description("Link to the last page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("_links.next").description("Link to the next page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("_links.prev").description("Link to the previous page of task execution resources").type(JsonFieldType.OBJECT).optional(), + subsectionWithPath("page").description("Pagination properties")))); } @Test public void listTaskExecutionsByName() throws Exception { this.mockMvc.perform( - get("/tasks/executions") - .param("name", "taskB") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/tasks/executions") + .param("name", "taskB") + .param("page", "0") + .param("size", "10") + ) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the task execution")), - responseFields( - subsectionWithPath("_embedded.taskExecutionResourceList") - 
.description("Contains a collection of Task Executions/"), - subsectionWithPath("_links.self").description("Link to the task execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("name") + .description("The name associated with the task execution")), + responseFields( + subsectionWithPath("_embedded.taskExecutionResourceList") + .description("Contains a collection of Task Executions/"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("page").description("Pagination properties")))); + } + + @Test + public void listTaskThinExecutionsByName() throws Exception { + this.mockMvc.perform( + get("/tasks/thinexecutions") + .param("name", "taskB") + .param("page", "0") + .param("size", "10") + ) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("name") + .description("The name associated with the task execution")), + responseFields( + subsectionWithPath("_embedded.taskExecutionThinResourceList") + .description("Contains a collection of Task Executions/"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("page").description("Pagination properties")))); } @Test public void stopTask() throws Exception { this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskA") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + post("/tasks/executions") + .param("name", "taskA") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated()); this.mockMvc.perform( - post("/tasks/executions/{id}", 1) - .param("platform", "default")) - .andDo(print()) + post("/tasks/executions/{id}", 1) + .queryParam("schemaTarget", "boot2") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The ids of an existing task execution (required)") - ), - requestParameters(parameterWithName("platform") - .description("The platform associated with the task execution(optional)")))); + pathParameters( + parameterWithName("id").description("The ids of an existing task execution (required)") + ), + requestParameters( + parameterWithName("schemaTarget").description("The schemaTarget provided in Task execution detail. 
(optional)")) + ) + ); } @Test public void taskExecutionRemove() throws Exception { - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskB") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .param("name", "taskB") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar")) .andExpect(status().isCreated())); this.mockMvc.perform( - delete("/tasks/executions/{ids}?action=CLEANUP", "1")) - .andDo(print()) + delete("/tasks/executions/{ids}?action=CLEANUP", "1")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters(parameterWithName("action").description("Optional. Defaults to: CLEANUP.")), @@ -224,28 +384,47 @@ public void taskExecutionRemove() throws Exception { @Test public void taskExecutionRemoveAndTaskDataRemove() throws Exception { this.mockMvc.perform( - delete("/tasks/executions/{ids}?action=CLEANUP,REMOVE_DATA", "1,2")) - .andDo(print()) + delete("/tasks/executions/{ids}?schemaTarget=boot2&action=CLEANUP,REMOVE_DATA", "1,2")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters(parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously.")), + requestParameters( + parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously."), + parameterWithName("schemaTarget").description("Schema target for task. (optional)") + ), pathParameters(parameterWithName("ids") - .description("Providing 2 comma separated task execution id values.")) + .description("Providing 2 comma separated task execution id values.") + ) )); } - private void createTaskDefinition(String taskName) throws Exception{ - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", taskName) - .param("definition", "timestamp --format='yyyy MM dd'")) + private void createTaskDefinition(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/definitions") + .param("name", taskName) + .param("definition", "timestamp --format='yyyy MM dd'")) .andExpect(status().isOk())); } - - private void destroyTaskDefinition(String taskName) throws Exception{ - documentation.dontDocument( () -> this.mockMvc.perform( - delete("/tasks/definitions/{name}", taskName)) + private void cleanupTaskExecutions(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + delete("/tasks/executions") + .queryParam("name", taskName) + ) + .andExpect(status().isOk())); + } + private void destroyTaskDefinition(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + delete("/tasks/definitions/{name}", taskName)) .andExpect(status().isOk())); } + + private void executeTask(String taskName) throws Exception { + documentation.dontDocument(() -> + this.mockMvc.perform( + post("/tasks/executions") + .param("name", taskName) + .param("arguments", "--server.port=8080 --foo=bar") + ).andExpect(status().isCreated()) + ); + } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java 
b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java index b5e78a9544..549df58818 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,12 +16,19 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import java.time.Duration; +import org.awaitility.Awaitility; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; + +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; +import org.springframework.cloud.dataflow.server.service.TaskExecutionService; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; @@ -34,8 +41,11 @@ * Documentation for the {@code /tasks/logs} endpoint. 
* * @author Ilayaperumal Gopinathan + * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class TaskLogsDocumentation extends BaseDocumentation { @Test @@ -53,11 +63,16 @@ public void getLogsByTaskId() throws Exception { .andExpect(status().isCreated()); TaskDeploymentRepository taskDeploymentRepository = springDataflowServer.getWebApplicationContext().getBean(TaskDeploymentRepository.class); - Thread.sleep(30000); + TaskExecutionService service = springDataflowServer.getWebApplicationContext().getBean(TaskExecutionService.class); + AggregateExecutionSupport aggregateExecutionSupport = springDataflowServer.getWebApplicationContext().getBean(AggregateExecutionSupport.class); + TaskDefinitionReader taskDefinitionReader = springDataflowServer.getWebApplicationContext().getBean(TaskDefinitionReader.class); + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + Awaitility.await().atMost(Duration.ofMillis(30000)).until(() -> service.getLog("default", + taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName).getTaskDeploymentId(), + schemaVersionTarget.getName()).length() > 0); this.mockMvc.perform( get("/tasks/logs/"+taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName) .getTaskDeploymentId()).param("platformName", "default")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java index efb0cb7e34..249f3fb49a 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java @@ -16,9 +16,9 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields; @@ -32,8 +32,10 @@ * Documentation for the /tasks/platforms endpoint. 
* * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class TaskPlatformDocumentation extends BaseDocumentation { @Test @@ -42,7 +44,6 @@ public void listTaskPlatforms() throws Exception { get("/tasks/platforms") .param("page", "0") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java index f337d6cf89..653bc98cf6 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -39,17 +39,19 @@ * Documentation for the /tasks/schedules endpoint. * * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings({"NewClassNamingConvention", "SameParameterValue"}) +@TestMethodOrder(MethodName.class) public class TaskSchedulerDocumentation extends BaseDocumentation { - @Before + @BeforeEach public void setup() throws Exception { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); createTaskDefinition("mytaskname"); } - @After + @AfterEach public void tearDown() throws Exception { destroyTaskDefinition("mytaskname"); unregisterApp(ApplicationType.task, "timestamp"); @@ -61,12 +63,14 @@ public void createSchedule() throws Exception { post("/tasks/schedules") .param("scheduleName", "myschedule") .param("taskDefinitionName", "mytaskname") + .param("platform", "default") .param("properties", "scheduler.cron.expression=00 22 17 ? 
*") .param("arguments", "--foo=bar")) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( requestParameters( parameterWithName("scheduleName").description("The name for the created schedule"), + parameterWithName("platform").description("The name of the platform the task is launched"), parameterWithName("taskDefinitionName") .description("The name of the task definition to be scheduled"), parameterWithName("properties") @@ -78,7 +82,6 @@ public void createSchedule() throws Exception { public void deleteSchedule() throws Exception { this.mockMvc.perform( delete("/tasks/schedules/{scheduleName}", "mytestschedule")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("scheduleName") @@ -91,7 +94,6 @@ public void listFilteredSchedules() throws Exception { get("/tasks/schedules/instances/{task-definition-name}", "FOO") .param("page", "0") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("task-definition-name") @@ -114,7 +116,6 @@ public void listAllSchedules() throws Exception { get("/tasks/schedules") .param("page", "0") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( requestParameters( diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java index cf462ce569..f9da2cdab9 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -38,17 +38,19 @@ * Documentation for the /tasks/validation endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class TaskValidationDocumentation extends BaseDocumentation { - @Before + @BeforeEach public void setup() throws Exception { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); createTaskDefinition("taskC"); } - @After + @AfterEach public void tearDown() throws Exception { destroyTaskDefinition("taskC"); unregisterApp(ApplicationType.task, "timestamp"); diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java index 82adf0363d..0c75070d25 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -37,11 +37,13 @@ * Documentation for the /tasks/info endpoint. * * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) public class TasksInfoDocumentation extends BaseDocumentation { - @Before + @BeforeEach public void setup() throws Exception { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); createTaskDefinition("taskA"); @@ -49,7 +51,7 @@ public void setup() throws Exception { } - @After + @AfterEach public void tearDown() throws Exception { destroyTaskDefinition("taskA"); destroyTaskDefinition("taskB"); @@ -60,7 +62,6 @@ public void tearDown() throws Exception { public void getTaskExecutionsInfo() throws Exception { this.mockMvc.perform( get("/tasks/info/executions?completed=false")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( responseFields( diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties b/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties new file mode 100644 index 0000000000..292487b111 --- /dev/null +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties @@ -0,0 +1,5 @@ +build.artifact=spring-cloud-dataflow-server +build.group=org.springframework.cloud +build.name=Spring Cloud Data Flow Server +build.time=2024-04-25T12\:36\:37.169Z +build.version=2.11.3-SNAPSHOT diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties b/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties new file mode 100644 index 0000000000..3a17e69030 --- /dev/null +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties @@ -0,0 +1,5 @@ +#Generated by 
Git-Commit-Id-Plugin +git.branch=main +git.commit.id.abbrev=fddafed +git.commit.id.full=fddafed39b919981cbb5bd04bd7fb5266fa25309 +git.commit.time=2024-04-24T13\:35\:29+0200 diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml index 061f02c72c..f73d9e6a8e 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml @@ -8,6 +8,9 @@ spring: metrics: collector: uri: http://localhost:${fakeMetricsCollector.port} + deployer: + local: + maximumConcurrentTasks: 50 autoconfigure: exclude: >- org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration, @@ -20,7 +23,10 @@ spring: org.springframework.cloud.dataflow.shell.autoconfigure.BaseShellAutoConfiguration, org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryTaskPlatformAutoConfiguration, org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesTaskPlatformAutoConfiguration -maven: - remoteRepositories: - springRepo: - url: https://repo.spring.io/libs-snapshot +management: + info: + build: + enabled: true + git: + enabled: true + mode: full \ No newline at end of file diff --git a/spring-cloud-dataflow-common/README.md b/spring-cloud-dataflow-common/README.md new file mode 100644 index 0000000000..781b568e5f --- /dev/null +++ b/spring-cloud-dataflow-common/README.md @@ -0,0 +1 @@ +# spring-cloud-dataflow-common diff --git a/spring-cloud-dataflow-common/pom.xml b/spring-cloud-dataflow-common/pom.xml new file mode 100644 index 0000000000..24040613ff --- /dev/null +++ b/spring-cloud-dataflow-common/pom.xml @@ -0,0 +1,125 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <artifactId>spring-cloud-dataflow-common-parent</artifactId> + <version>2.11.6-SNAPSHOT</version> + <groupId>org.springframework.cloud</groupId> + <packaging>pom</packaging> + <name>Spring Cloud Dataflow Common Parent</name> + <description>Common utilities shared by the dataflow family</description> + <parent> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-build</artifactId> + <version>2.11.6-SNAPSHOT</version> + <relativePath>../spring-cloud-dataflow-build</relativePath> + </parent> + <properties> + <jayway-awaitility.version>1.7.0</jayway-awaitility.version> + <java-semver.version>0.9.0</java-semver.version> + <joda-time.version>2.10.6</joda-time.version> + </properties> + <modules> + <module>spring-cloud-dataflow-common-persistence</module> + <module>spring-cloud-dataflow-common-flyway</module> + <module>spring-cloud-dataflow-common-test-docker</module> + <module>spring-cloud-dataflow-common-test-docker-junit5</module> + <module>spring-cloud-dataflow-common-dependencies</module> + </modules> + <dependencyManagement> + <dependencies> + <dependency> + <groupId>com.jayway.awaitility</groupId> + <artifactId>awaitility</artifactId> + <version>${jayway-awaitility.version}</version> + </dependency> + <dependency> + <groupId>com.github.zafarkhaja</groupId> + <artifactId>java-semver</artifactId> + <version>${java-semver.version}</version> + </dependency> + <dependency> + <groupId>joda-time</groupId> + <artifactId>joda-time</artifactId> + <version>${joda-time.version}</version> + </dependency> + </dependencies> + </dependencyManagement> + <build> + <pluginManagement> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.11.0</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + </configuration> + </plugin> + </plugins> + </pluginManagement> + </build> + <profiles> + <profile> + <id>spring</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <repositories> + <repository> + <id>spring-snapshots</id> + <name>Spring Snapshots</name> + <url>https://repo.spring.io/snapshot</url> + <snapshots> + <enabled>true</enabled> + </snapshots> + </repository> + <repository> + <id>spring-milestones</id> + <name>Spring Milestones</name> + <url>https://repo.spring.io/milestone</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + <repository> + <id>maven-central</id> + <name>Maven Central</name> + <url>https://repo.maven.apache.org/maven2</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + </repositories> + <pluginRepositories> + <pluginRepository> + <id>spring-snapshots</id> + <name>Spring Snapshots</name> + <url>https://repo.spring.io/snapshot</url> + <snapshots> + <enabled>true</enabled> + </snapshots> + </pluginRepository> + <pluginRepository> + <id>spring-milestones</id> + <name>Spring Milestones</name> + <url>https://repo.spring.io/milestone</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </pluginRepository> + <pluginRepository> + <id>maven-central</id> + <name>Maven Central</name> + <url>https://repo.maven.apache.org/maven2</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </pluginRepository> + </pluginRepositories> + </profile> + </profiles> +</project> diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml new file mode 100644 index 0000000000..a019a18563 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml @@ -0,0 +1,95 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>spring-cloud-dataflow-dependencies-parent</artifactId> + <groupId>org.springframework.cloud</groupId> + <version>2.11.6-SNAPSHOT</version> + <relativePath>../../spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent</relativePath> + </parent> + <artifactId>spring-cloud-dataflow-common-dependencies</artifactId> + <version>2.11.6-SNAPSHOT</version> + <packaging>pom</packaging> + <name>Spring Cloud Dataflow Common Dependencies</name> + <description>Spring Cloud Dataflow Common Dependencies</description> + <dependencyManagement> + <dependencies> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-common-flyway</artifactId> + <version>${dataflow.version}</version> + </dependency> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-common-test-docker</artifactId> + <version>${dataflow.version}</version> + </dependency> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-common-test-docker-junit5</artifactId> + <version>${dataflow.version}</version> + </dependency> + </dependencies> + </dependencyManagement> + <profiles> + <profile> + <id>spring</id> + <repositories> + <repository> + <id>spring-snapshots</id> + <name>Spring Snapshots</name> + <url>https://repo.spring.io/snapshot</url> + <snapshots> + <enabled>true</enabled> + </snapshots> + </repository> + <repository> + <id>spring-milestones</id> + <name>Spring Milestones</name> + <url>https://repo.spring.io/milestone</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + <repository> + <id>maven-central</id> + <name>Maven Central</name> + <url>https://repo.maven.apache.org/maven2</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + </repositories> + <pluginRepositories> + <pluginRepository> + <id>spring-snapshots</id> + <name>Spring Snapshots</name> + <url>https://repo.spring.io/snapshot</url> + <snapshots> + <enabled>true</enabled> + </snapshots> + </pluginRepository> + <pluginRepository> + <id>spring-milestones</id> + <name>Spring Milestones</name> + <url>https://repo.spring.io/milestone</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </pluginRepository> + <pluginRepository> + <id>maven-central</id> + <name>Maven Central</name> + <url>https://repo.maven.apache.org/maven2</url> + <snapshots> + <enabled>false</enabled> + </snapshots> + </pluginRepository> + </pluginRepositories> + </profile> + </profiles> +</project> diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml new file mode 100644 index 0000000000..96ade97aa0 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml @@ -0,0 +1,77 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <artifactId>spring-cloud-dataflow-common-flyway</artifactId> + <packaging>jar</packaging> + <name>Spring Cloud Dataflow Common Flyway Support</name> + <description>Spring Cloud Dataflow Common Flyway Support</description> + <parent> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-common-parent</artifactId> + <version>2.11.6-SNAPSHOT</version> + </parent> + <properties> + <flyway.version>8.5.13</flyway.version> + </properties> + <dependencies> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot</artifactId> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-jdbc</artifactId> + </dependency> + <dependency> + <groupId>org.flywaydb</groupId> + <artifactId>flyway-core</artifactId> + </dependency> + <dependency> + <groupId>org.flywaydb</groupId> + <artifactId>flyway-mysql</artifactId> + </dependency> + <dependency> + <groupId>org.flywaydb</groupId> + <artifactId>flyway-sqlserver</artifactId> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-common-persistence</artifactId> + <version>${dataflow.version}</version> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-test</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + <build> + <resources> + <resource> + <directory>src/main/resources</directory> + <filtering>true</filtering> + </resource> + </resources> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.11.0</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + </configuration> + </plugin> + </plugins> + </build> +</project> diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java new file mode 100644 index 0000000000..d1d435e03a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java @@ -0,0 +1,124 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.flywaydb.database.mysql; + +import java.sql.Connection; +import java.sql.SQLException; + +import org.flywaydb.core.api.MigrationVersion; +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.database.base.Database; +import org.flywaydb.core.internal.database.base.Table; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; +import org.flywaydb.database.mysql.MySQLConnection; +import org.flywaydb.database.mysql.MySQLDatabase; +import org.flywaydb.database.mysql.mariadb.MariaDBDatabaseType; + +public class MySQL57Database extends Database<MySQLConnection> { + + private final MySQLDatabase delegateDatabase; + + public MySQL57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + this(configuration, jdbcConnectionFactory, statementInterceptor, new MySQLDatabase(configuration, jdbcConnectionFactory, statementInterceptor)); + } + + protected MySQL57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor, MySQLDatabase delegateDatabase) { + super(configuration, jdbcConnectionFactory, statementInterceptor); + this.delegateDatabase = delegateDatabase; + } + + @Override + public String getRawCreateScript(Table table, boolean baseline) { + return delegateDatabase.getRawCreateScript(table, baseline); + } + + @Override + protected MySQLConnection doGetConnection(Connection connection) { + return delegateDatabase.doGetConnection(connection); + } + + @Override + protected MigrationVersion determineVersion() { + return delegateDatabase.determineVersion(); + } + + @Override + public final void ensureSupported() { + ensureDatabaseIsRecentEnough("5.1"); + if (databaseType instanceof MariaDBDatabaseType) { + ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("10.4", org.flywaydb.core.internal.license.Edition.ENTERPRISE); + recommendFlywayUpgradeIfNecessary("10.6"); + } else { + ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("5.7", org.flywaydb.core.internal.license.Edition.ENTERPRISE); + recommendFlywayUpgradeIfNecessary("8.0"); + } + } + + @Override + public void close() { + try { + super.close(); + } finally { + delegateDatabase.close(); + } + } + + @Override + protected String doGetCurrentUser() throws SQLException { + return delegateDatabase.doGetCurrentUser(); + } + + @Override + public boolean supportsDdlTransactions() { + return delegateDatabase.supportsDdlTransactions(); + } + + @Override + public boolean supportsChangingCurrentSchema() { + return delegateDatabase.supportsChangingCurrentSchema(); + } + + @Override + public String getBooleanTrue() { + return delegateDatabase.getBooleanTrue(); + } + + @Override + public String getBooleanFalse() { + return delegateDatabase.getBooleanFalse(); + } + + @Override + public String getOpenQuote() { + return delegateDatabase.getOpenQuote(); + } + + @Override + public String getCloseQuote() { + return delegateDatabase.getCloseQuote(); + } + + @Override + public boolean catalogIsSchema() { + return delegateDatabase.catalogIsSchema(); + } + + @Override + public boolean useSingleConnection() { + return delegateDatabase.useSingleConnection(); + } +}
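As a rough sketch of how this class comes into play (not part of the patch): Flyway instantiates it through MySQL57DatabaseType below, which outbids the stock MySQL type via getPriority(), so a plain Flyway bootstrap is all a consumer needs. The JDBC url, credentials, and migration location here are placeholders.

import org.flywaydb.core.Flyway;

public class MySql57MigrationSketch {

	public static void main(String[] args) {
		// With spring-cloud-dataflow-common-flyway on the classpath, the highest
		// priority DatabaseType that handles this connection wins, so MySQL 5.7
		// gets the relaxed version handling implemented above.
		Flyway flyway = Flyway.configure()
				.dataSource("jdbc:mysql://localhost:3306/dataflow", "user", "secret") // placeholders
				.locations("classpath:db/migration")
				.load();
		flyway.migrate();
	}
}

diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java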
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java new file mode 100644 index 0000000000..04b39f74bf --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.flywaydb.database.mysql; + +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.database.base.Database; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; + +public class MySQL57DatabaseType extends MySQLDatabaseType { + + @Override + public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + return new MySQL57Database(configuration, jdbcConnectionFactory, statementInterceptor); + } + + @Override + public int getPriority() { + return super.getPriority() + 1; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java new file mode 100644 index 0000000000..203cd7d907 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java @@ -0,0 +1,29 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.flywaydb.database.mysql.mariadb; + +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; +import org.flywaydb.database.mysql.MySQL57Database; +import org.flywaydb.database.mysql.mariadb.MariaDBDatabase; + +public class MariaDB57Database extends MySQL57Database { + + public MariaDB57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + super(configuration, jdbcConnectionFactory, statementInterceptor, new MariaDBDatabase(configuration, jdbcConnectionFactory, statementInterceptor)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java new file mode 100644 index 0000000000..644e420895 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.flywaydb.database.mysql.mariadb; + +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.database.base.Database; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; + +public class MariaDB57DatabaseType extends MariaDBDatabaseType { + + @Override + public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + return new MariaDB57Database(configuration, jdbcConnectionFactory, statementInterceptor); + } + + @Override + public int getPriority() { + return super.getPriority() + 2; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java new file mode 100644 index 0000000000..58050cf9bb --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java @@ -0,0 +1,102 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.List; + +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Context; +import org.flywaydb.core.api.callback.Event; + +import org.springframework.jdbc.BadSqlGrammarException; +import org.springframework.util.ObjectUtils; + +/** + * Base implementation providing some shared features for java based callbacks. + * + * @author Janne Valkealahti + * + */ +public abstract class AbstractCallback implements Callback { + + private final Event event; + private final List<SqlCommand> commands; + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + /** + * Instantiates a new abstract callback. + * + * @param event the event to hook into + */ + public AbstractCallback(Event event) { + this(event, null); + } + + /** + * Instantiates a new abstract callback. + * + * @param event the event to hook into + * @param commands the sql commands to run + */ + public AbstractCallback(Event event, List<SqlCommand> commands) { + this.event = event; + this.commands = commands; + } + + @Override + public boolean supports(Event event, Context context) { + return ObjectUtils.nullSafeEquals(this.event, event); + } + + @Override + public boolean canHandleInTransaction(Event event, Context context) { + return true; + } + + @Override + public void handle(Event event, Context context) { + try { + runner.execute(context.getConnection(), getCommands(event, context)); + } + catch (Exception sqe) { + if (sqe instanceof BadSqlGrammarException) { + throw new DataFlowSchemaMigrationException( + "An exception occurred during migration. This may indicate " + + "that you have run Spring Batch Jobs or Spring Cloud " + + "Tasks prior to running Spring Cloud Data Flow first. " + + "Data Flow must create these tables.", sqe); + + } + throw sqe; + } + } + + @Override + public String getCallbackName() { + return ""; + } + + /** + * Gets the commands. + * + * @param event the event + * @param context the context + * @return the commands + */ + public List<SqlCommand> getCommands(Event event, Context context) { + return commands; + } +}
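A minimal sketch of a concrete subclass (not part of the patch), hooked to Event.AFTER_MIGRATE; the table name and the suppressed error code (1051, MySQL's "unknown table") are assumptions for illustration.

import java.util.Collections;

import org.flywaydb.core.api.callback.Event;

import org.springframework.cloud.dataflow.common.flyway.AbstractCallback;
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

public class AfterMigrateCleanupCallback extends AbstractCallback {

	public AfterMigrateCleanupCallback() {
		// The error code is suppressed so the callback also succeeds when the
		// scratch table was never created (1051 = MySQL "unknown table").
		super(Event.AFTER_MIGRATE, Collections.singletonList(
				SqlCommand.from("DROP TABLE MIGRATION_SCRATCH", 1051)));
	}
}

diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java new file mode 100644 index 0000000000..8625692121 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java @@ -0,0 +1,57 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.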
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.List; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +/** + * Base implementation providing some shared features for java based migrations. + * + * @author Janne Valkealahti + * + */ +public abstract class AbstractMigration extends BaseJavaMigration { + + private final List<SqlCommand> commands; + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + /** + * Instantiates a new abstract migration. + * + * @param commands the commands + */ + public AbstractMigration(List<SqlCommand> commands) { + super(); + this.commands = commands; + } + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), getCommands()); + } + + /** + * Gets the commands. + * + * @return the commands + */ + public List<SqlCommand> getCommands() { + return commands; + } +}
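A minimal sketch of a versioned Java migration on top of this base class (not part of the patch); the class name and DDL are placeholders, with Flyway deriving version 2 from the V2__ prefix when the class sits on a configured migration location.

import java.util.Collections;

import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

public class V2__Add_Audit_Table extends AbstractMigration {

	public V2__Add_Audit_Table() {
		// Single vendor-neutral DDL statement; no suppressed error codes needed.
		super(Collections.singletonList(
				SqlCommand.from("CREATE TABLE AUDIT_LOG (ID BIGINT NOT NULL, NOTES VARCHAR(255))")));
	}
}

diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java new file mode 100644 index 0000000000..c59f339de4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.common.flyway; + +/** + * Exception is thrown when an error occurs while migrating the dataflow schema. + * + * @author Glenn Renfro + */ +public class DataFlowSchemaMigrationException extends RuntimeException { + + private static final long serialVersionUID = 2000527476523962349L; + + /** + * Exception will use the message specified. + * + * @param message the text that will be associated with the exception. + * @param throwable the exception that is being wrapped. + */ + public DataFlowSchemaMigrationException(String message, Throwable throwable) { + super(message, throwable); + } + + /** + * Exception will use the message specified. + * @param message the text that will be associated with the exception.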
+ */ + public DataFlowSchemaMigrationException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java new file mode 100644 index 0000000000..cf5ab9a466 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java @@ -0,0 +1,69 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.DatabaseMetaData; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.boot.jdbc.DatabaseDriver; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.jdbc.support.MetaDataAccessException; + +/** + * Provides utility methods to help with {@link DatabaseDriver} related operations. + */ +public final class DatabaseDriverUtils { + + private static final Logger LOG = LoggerFactory.getLogger(DatabaseDriverUtils.class); + + private DatabaseDriverUtils() { + } + + /** + * Finds a database driver suitable for a datasource. + *

By default, the jdbc url reported from the database metadata is used to determine + * the driver. It also handles the special case where MariaDB reports a 'jdbc:maria' + * url even though the original url was prefixed with 'jdbc:mysql'. + * + * @param dataSource the datasource to inspect + * @return a database driver suitable for the datasource + */ + public static DatabaseDriver getDatabaseDriver(DataSource dataSource) { + // copied from boot's flyway auto-config to get matching db vendor id (but adjusted + // to handle the case when MariaDB driver is being used against MySQL database). + try { + String url = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getURL); + DatabaseDriver databaseDriver = DatabaseDriver.fromJdbcUrl(url); + if (databaseDriver == DatabaseDriver.MARIADB) { + // MariaDB reports a 'jdbc:maria' url even when user specified 'jdbc:mysql'. + // Verify the underlying database is not really MySql. + String product = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseProductName); + if (DatabaseDriver.MYSQL.name().equalsIgnoreCase(product)) { + LOG.info("Using MariaDB driver against MySQL database - will use MySQL"); + databaseDriver = DatabaseDriver.MYSQL; + } + } + return databaseDriver; + } + catch (MetaDataAccessException ex) { + throw new IllegalStateException(ex); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java new file mode 100644 index 0000000000..aa98aec176 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java @@ -0,0 +1,98 @@ +/* + * Copyright 2022-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.boot.env.EnvironmentPostProcessor; +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.Ordered; +import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.core.env.MapPropertySource; + +/** + * An {@link ApplicationContextInitializer} that replaces any configured 'spring.flyway.locations' + * properties that contain the '{vendor}' token with 'mysql' when using the MariaDB driver + * to access a MySQL database. + * + *

Typically property manipulation like this is implemented as an {@link EnvironmentPostProcessor} but + * in order to work with applications that are using Config server it must be a context initializer + * so it can run after the {@code org.springframework.cloud.bootstrap.config.PropertySourceBootstrapConfiguration} + * context initializer. + * + * @author Chris Bono + */ +public class FlywayVendorReplacingApplicationContextInitializer implements + ApplicationContextInitializer<ConfigurableApplicationContext>, Ordered { + + private final Logger log = LoggerFactory.getLogger(FlywayVendorReplacingApplicationContextInitializer.class); + + @Override + public void initialize(ConfigurableApplicationContext applicationContext) { + + ConfigurableEnvironment env = applicationContext.getEnvironment(); + + // If there is a spring.datasource.url prefixed w/ "jdbc:mysql:" and using the MariaDB driver then replace {vendor} + boolean usingMariaDriver = env.getProperty("spring.datasource.driver-class-name", "").equals("org.mariadb.jdbc.Driver"); + boolean usingMySqlUrl = env.getProperty("spring.datasource.url", "").startsWith("jdbc:mysql:"); + if (!(usingMariaDriver && usingMySqlUrl)) { + return; + } + + log.info("Using MariaDB driver w/ MySQL url - looking for '{vendor}' in 'spring.flyway.locations'"); + + // Look for spring.flyway.locations[0..N] and if found then override it w/ vendor replaced version + Map<String, Object> replacedLocations = new HashMap<>(); + + int propIdx = 0; + while (true) { + String locationPropName = String.format("spring.flyway.locations[%d]", propIdx++); + String configuredLocation = env.getProperty(locationPropName); + if (configuredLocation == null) { + break; + } + if (configuredLocation.contains("{vendor}")) { + String replaceLocation = configuredLocation.replace("{vendor}", "mysql"); + replacedLocations.put(locationPropName, replaceLocation); + } + } + + if (replacedLocations.isEmpty()) { + log.info("No properties with '{vendor}' found to replace"); + return; + } + + log.info("Replacing '{vendor}' in {}", replacedLocations); + + env.getPropertySources().addFirst(new MapPropertySource("overrideVendorInFlywayLocations", replacedLocations)); + } + + /** + * The precedence for execution order - should execute last. + * + * @return lowest precedence to ensure it executes after other initializers + */ + @Override + public int getOrder() { + return Ordered.LOWEST_PRECEDENCE; + } +}
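A minimal sketch of wiring the initializer in by hand (not part of the patch); in the real server it would typically be registered through the Spring factories mechanism, and the launcher class here is a placeholder.

import org.springframework.boot.SpringApplication;

import org.springframework.cloud.dataflow.common.flyway.FlywayVendorReplacingApplicationContextInitializer;

public class DataFlowServerLauncherSketch {

	public static void main(String[] args) {
		SpringApplication app = new SpringApplication(DataFlowServerLauncherSketch.class); // placeholder source class
		// Ordered.LOWEST_PRECEDENCE means the initializer runs after config-server
		// property sources are attached, so it sees the final flyway locations.
		app.addInitializers(new FlywayVendorReplacingApplicationContextInitializer());
		app.run(args);
	}
}

diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java new file mode 100644 index 0000000000..9e5b94b9aa --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java @@ -0,0 +1,119 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.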
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java new file mode 100644 index 0000000000..9e5b94b9aa --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java @@ -0,0 +1,119 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.Connection; +import java.util.Collections; +import java.util.List; + +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * Keeps a SQL command together with the SQL error codes whose failures it may suppress. + * + * @author Janne Valkealahti + * + */ +public class SqlCommand { + + private final String command; + private final List<Integer> suppressedErrorCodes; + + /** + * Convenience method returning a new instance. + * + * @param command the command + * @return the sql command + */ + public static SqlCommand from(String command) { + return new SqlCommand(command, null); + } + + /** + * Convenience method returning a new instance. + * + * @param command the command + * @param suppressedErrorCode the suppressed error code + * @return the sql command + */ + public static SqlCommand from(String command, int suppressedErrorCode) { + return new SqlCommand(command, suppressedErrorCode); + } + + public SqlCommand() { + this(null, null); + } + + /** + * Instantiates a new sql command. + * + * @param command the command + * @param suppressedErrorCode the suppressed error code + */ + public SqlCommand(String command, int suppressedErrorCode) { + this(command, Collections.singletonList(suppressedErrorCode)); + } + + /** + * Instantiates a new sql command. + * + * @param command the command + * @param suppressedErrorCodes the suppressed error codes + */ + public SqlCommand(String command, List<Integer> suppressedErrorCodes) { + this.command = command; + this.suppressedErrorCodes = suppressedErrorCodes; + } + + /** + * Gets the command. + * + * @return the command + */ + public String getCommand() { + return command; + } + + /** + * Gets the suppressed error codes. + * + * @return the suppressed error codes + */ + public List<Integer> getSuppressedErrorCodes() { + return suppressedErrorCodes; + } + + /** + * Checks if this command can handle execution directly + * in a given jdbc template. + * + * @return true, if command can handle jdbc template + */ + public boolean canHandleInJdbcTemplate() { + return false; + } + + /** + * Handle command in a given jdbc template. + * + * @param jdbcTemplate the jdbc template + * @param connection the sql connection + */ + public void handle(JdbcTemplate jdbcTemplate, Connection connection) { + // expected to get handled in a sub-class + throw new UnsupportedOperationException("Not supported in a base class"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java new file mode 100644 index 0000000000..7e1e42989d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java @@ -0,0 +1,71 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.Connection; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.SingleConnectionDataSource; +import org.springframework.jdbc.support.SQLExceptionTranslator; +import org.springframework.util.ObjectUtils; + +/** + * Simple utility class to run commands with a connection, possibly suppressing + * errors. + * + * @author Janne Valkealahti + * + */ +public class SqlCommandsRunner { + + private static final Logger logger = LoggerFactory.getLogger(SqlCommandsRunner.class); + + /** + * Executes a list of {@code SqlCommand}s, suppressing any error codes configured + * on a command. + * + * @param connection the connection + * @param commands the sql commands + */ + public void execute(Connection connection, List<SqlCommand> commands) { + JdbcTemplate jdbcTemplate = new JdbcTemplate(new SingleConnectionDataSource(connection, true)); + SQLExceptionTranslator origExceptionTranslator = jdbcTemplate.getExceptionTranslator(); + + for (SqlCommand command : commands) { + if (command.canHandleInJdbcTemplate()) { + command.handle(jdbcTemplate, connection); + } + else { + if (!ObjectUtils.isEmpty(command.getSuppressedErrorCodes())) { + jdbcTemplate.setExceptionTranslator(new SuppressSQLErrorCodesTranslator(command.getSuppressedErrorCodes())); + } + try { + logger.debug("Executing command {}", command.getCommand()); + jdbcTemplate.execute(command.getCommand()); + } catch (SuppressDataAccessException e) { + logger.debug("Suppressing error", e); + } + // restore original translator in case next command + // doesn't define suppressing codes. + jdbcTemplate.setExceptionTranslator(origExceptionTranslator); + } + } + } +}
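As a usage sketch (not part of this change): running a drop-then-create sequence where the drop may legitimately fail. Error code 1091 is MySQL's "can't drop field or key" code, and the table and index names are made up:

```java
import java.sql.Connection;
import java.util.Arrays;

class SqlCommandsRunnerDemo {
	void recreateIndex(Connection connection) {
		SqlCommandsRunner runner = new SqlCommandsRunner();
		runner.execute(connection, Arrays.asList(
				// ignore MySQL error 1091 if the index does not exist yet
				SqlCommand.from("ALTER TABLE task_execution DROP INDEX idx_task_name", 1091),
				SqlCommand.from("CREATE INDEX idx_task_name ON task_execution (task_name)")));
	}
}
```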
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java new file mode 100644 index 0000000000..28098afbc7 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import org.springframework.dao.DataAccessException; + +/** + * Special type of {@link DataAccessException} indicating that the error can be + * suppressed. + * + * @author Janne Valkealahti + * + */ +@SuppressWarnings("serial") +public class SuppressDataAccessException extends DataAccessException { + + /** + * Instantiates a new suppress data access exception. + * + * @param msg the message + * @param cause the cause + */ + public SuppressDataAccessException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java new file mode 100644 index 0000000000..385daa4905 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java @@ -0,0 +1,68 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.List; + +import org.slf4j.LoggerFactory; + +import org.springframework.dao.DataAccessException; +import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; +import org.springframework.util.Assert; + +/** + * {@link SQLErrorCodeSQLExceptionTranslator} suppressing errors based on a + * configured list of codes by throwing a dedicated {@link SuppressDataAccessException}. + * + * @author Janne Valkealahti + * + */ +public class SuppressSQLErrorCodesTranslator extends SQLErrorCodeSQLExceptionTranslator { + + private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SuppressSQLErrorCodesTranslator.class); + private final List<Integer> errorCodes; + + /** + * Instantiates a new suppress SQL error codes translator. + * + * @param errorCode the error code + */ + public SuppressSQLErrorCodesTranslator(int errorCode) { + this(Arrays.asList(errorCode)); + } + + /** + * Instantiates a new suppress SQL error codes translator.
+ * + * @param errorCodes the error codes + */ + public SuppressSQLErrorCodesTranslator(List<Integer> errorCodes) { + super(); + Assert.notNull(errorCodes, "errorCodes must be set"); + this.errorCodes = errorCodes; + } + + @Override + protected DataAccessException customTranslate(String task, String sql, SQLException sqlEx) { + logger.debug("Checking sql error code {} against {}", sqlEx.getErrorCode(), errorCodes); + if (errorCodes.contains(sqlEx.getErrorCode())) { + return new SuppressDataAccessException(task, sqlEx); + } + return super.customTranslate(task, sql, sqlEx); + } +}
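A short sketch of wiring the translator directly onto a JdbcTemplate; error code 1050 is MySQL's "table already exists" code, chosen purely for illustration:

```java
import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;

class SuppressingTranslatorDemo {
	void createTableIfAbsent(DataSource dataSource) {
		JdbcTemplate jdbc = new JdbcTemplate(dataSource);
		jdbc.setExceptionTranslator(new SuppressSQLErrorCodesTranslator(1050));
		try {
			jdbc.execute("CREATE TABLE demo (id INT)");
		}
		catch (SuppressDataAccessException ex) {
			// table already existed - suppressed and safe to continue
		}
	}
}
```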
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin new file mode 100644 index 0000000000..ccab893acc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin @@ -0,0 +1,2 @@ +org.flywaydb.database.mysql.mariadb.MariaDB57DatabaseType +org.flywaydb.database.mysql.MySQL57DatabaseType diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt new file mode 100644 index 0000000000..a4a0d40d19 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt @@ -0,0 +1 @@ +@flyway.version@ diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java new file mode 100644 index 0000000000..28efc74cb0 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java @@ -0,0 +1,131 @@ +/* + * Copyright 2022-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Named; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.boot.test.context.runner.ApplicationContextRunner; +import org.springframework.core.env.ConfigurableEnvironment; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +/** + * Tests for {@link FlywayVendorReplacingApplicationContextInitializer}. + */ +public class FlywayVendorReplacingApplicationContextInitializerTests { + + @ParameterizedTest(name = "{0}") + @MethodSource("vendorReplacedProperlyProvider") + void vendorReplacedProperly(boolean usingMySqlUrl, boolean usingMariaDriver, List<String> configuredLocationProps, List<String> finalLocationProps) { + List<String> props = new ArrayList<>(); + props.add("spring.datasource.url=" + (usingMySqlUrl ? "jdbc:mysql://localhost:3306/dataflow?permitMysqlScheme" : "jdbc:mariadb://localhost:3306/dataflow")); + props.add("spring.datasource.driver-class-name=" + (usingMariaDriver ? "org.mariadb.jdbc.Driver" : "org.mysql.jdbc.Driver")); + props.addAll(configuredLocationProps); + + // Prime an actual env by running it through the AppContextRunner with the configured properties + new ApplicationContextRunner().withPropertyValues(props.toArray(new String[0])).run((context) -> { + ConfigurableEnvironment env = context.getEnvironment(); + + // Sanity check the locations props are as expected + configuredLocationProps.forEach((location) -> { + String key = location.split("=")[0]; + String value = location.split("=")[1]; + assertThat(env.getProperty(key)).isEqualTo(value); + }); + + // Run the env through the ACI + FlywayVendorReplacingApplicationContextInitializer flywayVendorReplacingInitializer = new FlywayVendorReplacingApplicationContextInitializer(); + flywayVendorReplacingInitializer.initialize(context); + + // Verify they are replaced as expected + finalLocationProps.forEach((location) -> { + String key = location.split("=")[0]; + String value = location.split("=")[1]; + assertThat(env.getProperty(key)).isEqualTo(value); + }); + }); + } + + private static Stream<Arguments> vendorReplacedProperlyProvider() { + return Stream.of( + arguments(Named.of("singleLocationWithVendor",true), true, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/mysql") + ), + arguments(Named.of("singleLocationWithoutVendor",true), true, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/foo"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/foo") + ), + arguments(Named.of("noLocations",true), true, + Collections.emptyList(), + Collections.emptyList() + ), + arguments(Named.of("multiLocationsAllWithVendor",true), true, + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/{vendor}", + "spring.flyway.locations[1]=classpath:org/skipper/db1/{vendor}", + "spring.flyway.locations[2]=classpath:org/skipper/db2/{vendor}"), + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/mysql", + "spring.flyway.locations[1]=classpath:org/skipper/db1/mysql", +
"spring.flyway.locations[2]=classpath:org/skipper/db2/mysql") + ), + arguments(Named.of("multiLocationsSomeWithVendor",true), true, + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/{vendor}", + "spring.flyway.locations[1]=classpath:org/skipper/db1/foo", + "spring.flyway.locations[2]=classpath:org/skipper/db2/{vendor}"), + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/mysql", + "spring.flyway.locations[1]=classpath:org/skipper/db1/foo", + "spring.flyway.locations[2]=classpath:org/skipper/db2/mysql") + ), + arguments(Named.of("multiLocationsNoneWithVendor",true), true, + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/foo", + "spring.flyway.locations[1]=classpath:org/skipper/db1/bar", + "spring.flyway.locations[2]=classpath:org/skipper/db2/zaa"), + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/foo", + "spring.flyway.locations[1]=classpath:org/skipper/db1/bar", + "spring.flyway.locations[2]=classpath:org/skipper/db2/zaa") + ), + arguments(Named.of("mariaUrlWithMariaDriverDoesNotReplace",false), true, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}") + ), + arguments(Named.of("mysqlUrlWithMysqlDriverDoesNotReplace",true), false, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}") + ), + arguments(Named.of("mariaUrlMysqlDriverDoesNotReplace",false), false, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}") + ) + ); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml new file mode 100644 index 0000000000..ade4f2742d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml @@ -0,0 +1,79 @@ + + + 4.0.0 + + spring-cloud-dataflow-parent + org.springframework.cloud + 2.11.6-SNAPSHOT + ../../spring-cloud-dataflow-parent + + spring-cloud-dataflow-common-persistence + spring-cloud-dataflow-audit + Spring Cloud Data Flow Common Persistence Utilities + jar + + true + 3.4.1 + + + + org.hibernate + hibernate-core + + + org.springframework.data + spring-data-jpa + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java new file mode 100644 index 0000000000..52f2fdf0c8 --- /dev/null +++ 
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.persistence.type; + +import org.hibernate.type.AbstractSingleColumnStandardBasicType; +import org.hibernate.type.descriptor.java.StringTypeDescriptor; +import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; +import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; +import org.hibernate.type.descriptor.sql.VarcharTypeDescriptor; + +import org.springframework.util.Assert; + +/** + * Provide for Hibernate and Postgres incompatibility for columns of type text. + * @author Corneil du Plessis + */ +public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType { + + public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType(); + + + + public DatabaseAwareLobType() { + super( getDbDescriptor(), StringTypeDescriptor.INSTANCE ); + } + + public static SqlTypeDescriptor getDbDescriptor() { + if( isPostgres() ) { + return VarcharTypeDescriptor.INSTANCE; + } + else { + return ClobTypeDescriptor.DEFAULT; + } + } + + /** + * This method will be used to set an indicator that the database driver in use is PostgreSQL. + * @param postgresDB true if PostgreSQL. + */ + private static boolean isPostgres() { + Boolean postgresDatabase = DatabaseTypeAwareInitializer.getPostgresDatabase(); + Assert.notNull(postgresDatabase, "Expected postgresDatabase to be set"); + return postgresDatabase; + } + + @Override + public String getName() { + return "database_aware_lob"; + } +} \ No newline at end of file diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java new file mode 100644 index 0000000000..c67454afe2 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java new file mode 100644 index 0000000000..c67454afe2 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.persistence.type; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.Ordered; +import org.springframework.core.env.ConfigurableEnvironment; + +public class DatabaseTypeAwareInitializer implements ApplicationContextInitializer<ConfigurableApplicationContext>, Ordered { + private static final Logger logger = LoggerFactory.getLogger(DatabaseTypeAwareInitializer.class); + private static Boolean postgresDatabase = null; + + public DatabaseTypeAwareInitializer() { + } + + @Override + public void initialize(ConfigurableApplicationContext applicationContext) { + ConfigurableEnvironment env = applicationContext.getEnvironment(); + String property = env.getProperty("spring.datasource.driver-class-name", ""); + logger.info("checking database driver type: {}", property); + postgresDatabase = property.contains("postgres"); + } + + public static Boolean getPostgresDatabase() { + return postgresDatabase; + } + + @Override + public int getOrder() { + return Ordered.HIGHEST_PRECEDENCE; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000..189a0a6f71 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories @@ -0,0 +1 @@ +org.springframework.context.ApplicationContextInitializer=org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer \ No newline at end of file
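The spring.factories entry above registers the initializer automatically; as a sketch, it could equally be attached by hand when building the application (the application class name here is invented):

```java
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer;

@SpringBootApplication
class ManualRegistrationExample {
	public static void main(String[] args) {
		SpringApplication app = new SpringApplication(ManualRegistrationExample.class);
		// equivalent to the META-INF/spring.factories registration above
		app.addInitializers(new DatabaseTypeAwareInitializer());
		app.run(args);
	}
}
```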
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java new file mode 100644 index 0000000000..cbf787d332 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java @@ -0,0 +1,35 @@ +package org.springframework.cloud.dataflow.common.persistence; + +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.boot.test.context.runner.ApplicationContextRunner; +import org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer; +import org.springframework.core.env.ConfigurableEnvironment; + +import static org.assertj.core.api.Assertions.assertThat; + +public class DatabaseTypeAwareInitializerTest { + + private static final Logger logger = LoggerFactory.getLogger(DatabaseTypeAwareInitializerTest.class); + + @Test + public void testInitPostgres() { + initDriverType("org.postgresql.Driver"); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isNotNull(); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isTrue(); + } + + @Test + public void testInitMariaDB() { + initDriverType("org.mariadb.jdbc.Driver"); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isNotNull(); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isFalse(); + } + + private void initDriverType(String driverClassName) { + // Prime an actual env by running it through the AppContextRunner with the configured properties + new ApplicationContextRunner().withPropertyValues("spring.datasource.driver-class-name=" + driverClassName).run((context) -> { + ConfigurableEnvironment env = context.getEnvironment(); + logger.info("spring.datasource.driver-class-name={}", env.getProperty("spring.datasource.driver-class-name")); + DatabaseTypeAwareInitializer initializer = new DatabaseTypeAwareInitializer(); + initializer.initialize(context); + }); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml new file mode 100644 index 0000000000..752668707e --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml @@ -0,0 +1,32 @@ + + + 4.0.0 + + spring-cloud-dataflow-common-test-docker-junit5 + jar + + Spring Cloud Dataflow Common Docker Test JUnit5 Support + Utilities to help using junit5 + + + org.springframework.cloud + spring-cloud-dataflow-common-parent + 2.11.6-SNAPSHOT + + + true + + + + org.springframework.cloud + spring-cloud-dataflow-common-test-docker + ${project.version} + + + org.springframework.boot + spring-boot-starter-test + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java new file mode 100644 index 0000000000..3c4be95dd4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Repeatable; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; + +@Documented +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +@Repeatable(DockerComposes.class) +@ExtendWith(DockerComposeExtension.class) +public @interface DockerCompose { + + int order() default 0; + String id() default ""; + boolean start() default true; + String[] locations() default {}; + String[] services() default {}; + String log() default ""; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java new file mode 100644 index 0000000000..81b635c156 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; + +public interface DockerComposeCluster { + + DockerComposeRule getRule(); + void start(); + void stop(); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java new file mode 100644 index 0000000000..ec5e6d55da --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java @@ -0,0 +1,156 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.lang.reflect.Method; +import java.util.List; + +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; +import org.junit.jupiter.api.extension.ExtensionContext.Store; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; +import org.junit.platform.commons.util.AnnotationUtils; +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerComposeManager.DockerComposeData; + +/** + * {@code JUnit5} extension handling docker compose integration. + * + * @author Janne Valkealahti + * + */ +public class DockerComposeExtension + implements BeforeAllCallback, BeforeEachCallback, AfterAllCallback, AfterEachCallback, ParameterResolver { + + private static final Namespace NAMESPACE = Namespace.create(DockerComposeExtension.class); + + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + // add class level compose info into compose manager + DockerComposeManager dockerComposeManager = getDockerComposeManager(extensionContext); + + Class<?> testClass = extensionContext.getRequiredTestClass(); + String classKey = extensionContext.getRequiredTestClass().getSimpleName(); + + List<DockerCompose> dockerComposeAnnotations = AnnotationUtils.findRepeatableAnnotations(testClass, DockerCompose.class); + for (DockerCompose dockerComposeAnnotation : dockerComposeAnnotations) { + DockerComposeData dockerComposeData = new DockerComposeData(dockerComposeAnnotation.id(), + dockerComposeAnnotation.locations(), dockerComposeAnnotation.services(), + dockerComposeAnnotation.log(), dockerComposeAnnotation.start(), dockerComposeAnnotation.order()); + dockerComposeManager.addClassDockerComposeData(classKey, dockerComposeData); + } + } + + @Override + public void beforeEach(ExtensionContext context) throws Exception { + // add method level compose info into compose manager + DockerComposeManager dockerComposeManager = getDockerComposeManager(context); + + Method testMethod = context.getRequiredTestMethod(); + String classKey = context.getRequiredTestClass().getSimpleName(); + String methodKey = context.getRequiredTestMethod().getName(); + + List<DockerCompose> dockerComposeAnnotations = AnnotationUtils.findRepeatableAnnotations(testMethod, DockerCompose.class); + for (DockerCompose dockerComposeAnnotation : dockerComposeAnnotations) { + DockerComposeData dockerComposeData = new DockerComposeData(dockerComposeAnnotation.id(), + dockerComposeAnnotation.locations(), dockerComposeAnnotation.services(), + dockerComposeAnnotation.log(), dockerComposeAnnotation.start(), dockerComposeAnnotation.order()); + dockerComposeManager.addMethodDockerComposeData(classKey, methodKey, dockerComposeData); + } + dockerComposeManager.build(classKey, methodKey); + } + + @Override + public void afterEach(ExtensionContext context) throws Exception { + // clean containers related to class and method +
DockerComposeManager dockerComposeManager = getDockerComposeManager(context); + String classKey = context.getRequiredTestClass().getSimpleName(); + String methodKey = context.getRequiredTestMethod().getName(); + dockerComposeManager.stop(classKey, methodKey); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return (parameterContext.getParameter().getType() == DockerComposeInfo.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + DockerComposeManager dockerComposeManager = getDockerComposeManager(extensionContext); + return new DefaultDockerComposeInfo(dockerComposeManager); + } + + private static DockerComposeManager getDockerComposeManager(ExtensionContext context) { + Class<?> testClass = context.getRequiredTestClass(); + Store store = getStore(context); + return store.getOrComputeIfAbsent(testClass, (key) -> new DockerComposeManager(), DockerComposeManager.class); + } + + private static Store getStore(ExtensionContext context) { + return context.getRoot().getStore(NAMESPACE); + } + + private static class DefaultDockerComposeInfo implements DockerComposeInfo { + private final DockerComposeManager dockerComposeManager; + + public DefaultDockerComposeInfo(DockerComposeManager dockerComposeManager) { + this.dockerComposeManager = dockerComposeManager; + } + + @Override + public DockerComposeCluster id(String id) { + return new DefaultDockerComposeCluster(dockerComposeManager, id); + } + } + + private static class DefaultDockerComposeCluster implements DockerComposeCluster { + + private final DockerComposeManager dockerComposeManager; + private final String id; + + public DefaultDockerComposeCluster(DockerComposeManager dockerComposeManager, String id) { + this.dockerComposeManager = dockerComposeManager; + this.id = id; + } + + @Override + public DockerComposeRule getRule() { + return dockerComposeManager.getRule(id); + } + + @Override + public void start() { + dockerComposeManager.startId(id); + } + + @Override + public void stop() { + dockerComposeManager.stopId(id); + } + } +} diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java similarity index 74% rename from spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java rename to spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java index 3c146901d9..4df416d24e 100644 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2018-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -13,12 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; -package com.acme.boot13; +public interface DockerComposeInfo { -public class Main { - - public static void main(String[] args) { - System.out.println("Hello World"); - } + DockerComposeCluster id(String id); } diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java new file mode 100644 index 0000000000..a45d022f20 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java @@ -0,0 +1,273 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks.toHaveAllPortsOpen; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule.Builder; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; + +/** + * Manages lifecycles of {@link DockerComposeRule} instances created from class and + * method level {@link DockerCompose} annotations. + * + * @author Janne Valkealahti + * + */ +public class DockerComposeManager { + + private final Map<String, DockerComposeRule> rules = new HashMap<>(); + private final Map<String, List<DockerComposeData>> classKeys = new HashMap<>(); + private final Map<String, List<DockerComposeData>> methodKeys = new HashMap<>(); + + public DockerComposeManager() {} + + public void addClassDockerComposeData(String classKey, DockerComposeData dockerComposeData) { + String key = dockerComposeData.id + "$" + classKey; + classKeys.putIfAbsent(key, new ArrayList<>()); + classKeys.get(key).add(dockerComposeData); + } + + public void addMethodDockerComposeData(String classKey, String methodKey, DockerComposeData dockerComposeData) { + String key = dockerComposeData.id + "$" + classKey; + if (classKeys.containsKey(key)) { + classKeys.get(key).add(dockerComposeData); + } + else { + key = dockerComposeData.id + "$" + classKey + methodKey; + methodKeys.putIfAbsent(key, new ArrayList<>()); + methodKeys.get(key).add(dockerComposeData); + } + } + + public DockerComposeRule getRule(String id) { + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(0, e.getKey().indexOf("$")); + if (id.equals(idMatch)) { + return e.getValue(); + } + } + throw new
IllegalArgumentException("Id " + id + " not found"); + } + + public void build(String classKey, String methodKey) { + + ArrayList<OrderingWrapper> toStart = new ArrayList<>(); + + // class level + for (Entry<String, List<DockerComposeData>> e : classKeys.entrySet()) { + String key = e.getKey(); + ArrayList<String> locations = new ArrayList<>(); + ArrayList<String> services = new ArrayList<>(); + boolean start = true; + Integer order = Integer.MAX_VALUE; + String log = ""; + for (DockerComposeData dockerComposeData : e.getValue()) { + locations.addAll(Arrays.asList(dockerComposeData.getLocations())); + services.addAll(Arrays.asList(dockerComposeData.getServices())); + if (!dockerComposeData.isStart()) { + start = false; + } + if (dockerComposeData.getOrder() < order) { + order = dockerComposeData.getOrder(); + } + if (dockerComposeData.getLog() != null && dockerComposeData.getLog().length() > 0) { + log = dockerComposeData.getLog(); + } + } + Builder builder = DockerComposeRule.builder(); + builder.files(DockerComposeFiles.from(locations.toArray(new String[0]))); + for (String service : services) { + builder.waitingForService(service, toHaveAllPortsOpen()); + } + builder.saveLogsTo("build/test-docker-logs/" + log + classKey + "-" + methodKey); + DockerComposeRule rule = builder.build(); + rules.put(key, rule); + if (start) { + toStart.add(new OrderingWrapper(order, rule)); + } + } + + // method level + for (Entry<String, List<DockerComposeData>> e : methodKeys.entrySet()) { + String key = e.getKey(); + ArrayList<String> locations = new ArrayList<>(); + ArrayList<String> services = new ArrayList<>(); + boolean start = true; + Integer order = Integer.MAX_VALUE; + String log = ""; + for (DockerComposeData dockerComposeData : e.getValue()) { + locations.addAll(Arrays.asList(dockerComposeData.getLocations())); + services.addAll(Arrays.asList(dockerComposeData.getServices())); + if (!dockerComposeData.isStart()) { + start = false; + } + if (dockerComposeData.getOrder() < order) { + order = dockerComposeData.getOrder(); + } + if (dockerComposeData.getLog() != null && dockerComposeData.getLog().length() > 0) { + log = dockerComposeData.getLog(); + } + } + Builder builder = DockerComposeRule.builder(); + builder.files(DockerComposeFiles.from(locations.toArray(new String[0]))); + for (String service : services) { + builder.waitingForService(service, toHaveAllPortsOpen()); + } + builder.saveLogsTo("build/test-docker-logs/" + log + classKey + "-" + methodKey); + DockerComposeRule rule = builder.build(); + rules.put(key, rule); + if (start) { + toStart.add(new OrderingWrapper(order, rule)); + } + } + + Collections.sort(toStart); + for (OrderingWrapper w : toStart) { + try { + w.getRule().before(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + } + + public void stop(String classKey, String methodKey) { + ArrayList<String> toRemove = new ArrayList<>(); + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(e.getKey().indexOf("$") + 1, e.getKey().length()); + if (idMatch.equals(classKey)) { + toRemove.add(e.getKey()); + } + if (idMatch.equals(classKey + methodKey)) { + toRemove.add(e.getKey()); + } + } + for (String remove : toRemove) { + DockerComposeRule rule = rules.remove(remove); + if (rule != null) { + rule.after(); + } + } + // for now, just clear both class and method keys + classKeys.clear(); + methodKeys.clear(); + } + + public void startId(String id) { + DockerComposeRule rule = null; + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(0, e.getKey().indexOf("$")); + if (id.equals(idMatch)) { + rule = e.getValue(); + } + } + if (rule != null) {
+ try { + rule.before(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + public void stopId(String id) { + DockerComposeRule rule = null; + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(0, e.getKey().indexOf("$")); + if (id.equals(idMatch)) { + rule = e.getValue(); + } + } + if (rule != null) { + rule.after(); + } + } + + public static class DockerComposeData { + + private final String id; + private final boolean start; + private final String[] locations; + private final String[] services; + private final String log; + private final int order; + + public DockerComposeData(String id, String[] locations, String[] services, String log, boolean start, int order) { + this.id = id; + this.locations = locations; + this.services = services; + this.log = log; + this.start = start; + this.order = order; + } + + public String[] getLocations() { + return locations; + } + + public String[] getServices() { + return services; + } + + public String getLog() { + return log; + } + + public String getId() { + return id; + } + + public boolean isStart() { + return start; + } + + public int getOrder() { + return order; + } + } + + private static class OrderingWrapper implements Comparable<OrderingWrapper> { + Integer order; + DockerComposeRule rule; + + public OrderingWrapper(Integer order, DockerComposeRule rule) { + this.order = order; + this.rule = rule; + } + + public Integer getOrder() { + return order; + } + + public DockerComposeRule getRule() { + return rule; + } + + @Override + public int compareTo(OrderingWrapper o) { + return getOrder().compareTo(o.getOrder()); + } + } +}
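To illustrate how the manager's ordering is driven from annotations, a hypothetical test with two class-level clusters where 'db' (order 0) is started before 'app' (order 1); the compose file paths and service names are invented. As in the repeated-annotation tests later in this diff, @ExtendWith is added explicitly because repeated @DockerCompose annotations are wrapped in @DockerComposes, which carries no meta @ExtendWith:

```java
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@DockerCompose(id = "db", order = 0, locations = {"src/test/resources/db-compose.yml"}, services = {"postgres"})
@DockerCompose(id = "app", order = 1, locations = {"src/test/resources/app-compose.yml"})
@ExtendWith(DockerComposeExtension.class)
class OrderedComposeTests {

	@Test
	void clustersStartInOrder(DockerComposeInfo info) {
		// 'db' was started before 'app'; ids allow direct start/stop control
		info.id("app").stop();
	}
}
```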
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java new file mode 100644 index 0000000000..303eec75de --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Documented +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +public @interface DockerComposes { + + DockerCompose[] value(); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java new file mode 100644 index 0000000000..c1022e43b2 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.util.List; + +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector; + +public class LegacyDockerComposeExtension extends DockerComposeRule implements BeforeAllCallback, AfterAllCallback { + + private LegacyDockerComposeExtension(DockerComposeFiles files, List<ClusterWait> clusterWaits, + LogCollector logCollector, DockerMachine machine, boolean pullOnStartup, ProjectName projectName) { + super(files, clusterWaits, logCollector, machine, pullOnStartup, projectName); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + after(); + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + before(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder extends DockerComposeRule.Builder { + + @Override + public LegacyDockerComposeExtension build() { + return new LegacyDockerComposeExtension(files, clusterWaits, logCollector, machine, pullOnStartup, + projectName); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/resources/application.properties b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/resources/application.properties new file mode 100644 index 0000000000..e69de29bb2 diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java new file mode 100644 index 0000000000..2a39062dd3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; + +@DockerCompose(locations = {"src/test/resources/docker-compose-1.yml"}) +public class DockerCompose1Tests { + + @Test + public void testCompose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException { + assertThat(dockerComposeInfo).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule()).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule().containers().container("testservice1")).isNotNull(); + + Throwable thrown = catchThrowable(() -> { + dockerComposeInfo.id("").getRule().containers().container("testservice2").state(); + }); + assertThat(thrown) + .isInstanceOf(DockerExecutionException.class) + .hasNoCause(); + assertThat(thrown).message() + .containsIgnoringCase("No such service: testservice2"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java new file mode 100644 index 0000000000..85d5822e8b --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java @@ -0,0 +1,46 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerComposeExtension; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerComposeInfo; + +@DockerCompose(id = DockerCompose2Tests.CLUSTER1, locations = {"src/test/resources/docker-compose-1.yml"}) +@DockerCompose(id = DockerCompose2Tests.CLUSTER2, locations = {"src/test/resources/docker-compose-2.yml"}, start = false) +@ExtendWith(DockerComposeExtension.class) +public class DockerCompose2Tests { + + public final static String CLUSTER1 = "dc1"; + public final static String CLUSTER2 = "dc2"; + public final static String CLUSTER3 = "dc3"; + public final static String CLUSTER4 = "dc4"; + + @Test + @DockerCompose(id = DockerCompose2Tests.CLUSTER3, locations = {"src/test/resources/docker-compose-3.yml"}) + @DockerCompose(id = DockerCompose2Tests.CLUSTER4, locations = {"src/test/resources/docker-compose-4.yml"}, start = false) + public void testCompose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException { + + dockerComposeInfo.id(CLUSTER2).start(); + Thread.sleep(1000); + dockerComposeInfo.id(CLUSTER4).start(); + Thread.sleep(1000); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java new file mode 100644 index 0000000000..13f17f89f6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
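
DockerCompose2Tests starts its start = false clusters by hand and settles for fixed sleeps. Where deterministic readiness matters, a wait can be declared on the underlying rule instead. A sketch, assuming HealthCheck<Container> is the single-method check type consumed by the builder shown later in this diff:

	import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule;
	import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;

	class WaitInsteadOfSleepSketch {

		DockerComposeRule rule = DockerComposeRule.builder()
				.file("src/test/resources/docker-compose-2.yml")
				// resolves once every mapped port of the service accepts connections
				.waitingForService("testservice2", Container::areAllPortsOpen)
				.build();
	}
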
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerComposeInfo; + +@DockerCompose(locations = {"classpath:org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml"}) +public class DockerCompose3Tests { + + @Test + public void testCompose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException { + assertThat(dockerComposeInfo).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule()).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule().containers().container("testservice1")).isNotNull(); + + Throwable thrown = catchThrowable(() -> { + dockerComposeInfo.id("").getRule().containers().container("testservice2").state(); + }); + assertThat(thrown).isInstanceOf(DockerExecutionException.class).hasNoCause() + .message().containsIgnoringCase("No such service: testservice2"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml new file mode 100644 index 0000000000..cb8dbff2d9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml @@ -0,0 +1,4 @@ +services: + testservice1: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml new file mode 100644 index 0000000000..4500793c1f --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml @@ -0,0 +1,5 @@ +services: + testservice2: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml new file mode 100644 index 0000000000..38da37eb91 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml @@ -0,0 +1,5 @@ +services: + testservice3: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml new file mode 100644 index 0000000000..1605ea0e78 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml @@ -0,0 +1,4 @@ +services: + testservice4: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git 
a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml new file mode 100644 index 0000000000..c7e4357f6c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml @@ -0,0 +1,4 @@ +services: + testservice5: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml new file mode 100644 index 0000000000..682a582af4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml @@ -0,0 +1,5 @@ +services: + testservice6: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml new file mode 100644 index 0000000000..cb8dbff2d9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml @@ -0,0 +1,4 @@ +services: + testservice1: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml new file mode 100644 index 0000000000..b995d250cc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml @@ -0,0 +1,73 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> +	<modelVersion>4.0.0</modelVersion> + +	<artifactId>spring-cloud-dataflow-common-test-docker</artifactId> +	<packaging>jar</packaging> + +	<name>Spring Cloud Dataflow Common Docker Test Support</name> +	<description>Utilities to help using docker</description> +	<parent> +		<groupId>org.springframework.cloud</groupId> +		<artifactId>spring-cloud-dataflow-common-parent</artifactId> +		<version>2.11.6-SNAPSHOT</version> +	</parent> +	<properties> +		<!-- property element name lost in extraction; only its value "true" survives --> +	</properties> +	<dependencies> +		<dependency> +			<groupId>org.springframework</groupId> +			<artifactId>spring-core</artifactId> +		</dependency> +		<dependency> +			<groupId>commons-io</groupId> +			<artifactId>commons-io</artifactId> +		</dependency> +		<dependency> +			<groupId>org.apache.commons</groupId> +			<artifactId>commons-lang3</artifactId> +		</dependency> +		<dependency> +			<groupId>org.slf4j</groupId> +			<artifactId>slf4j-api</artifactId> +		</dependency> +		<dependency> +			<groupId>com.jayway.awaitility</groupId> +			<artifactId>awaitility</artifactId> +		</dependency> +		<dependency> +			<groupId>joda-time</groupId> +			<artifactId>joda-time</artifactId> +		</dependency> +		<dependency> +			<groupId>com.github.zafarkhaja</groupId> +			<artifactId>java-semver</artifactId> +		</dependency> +		<dependency> +			<groupId>org.springframework.boot</groupId> +			<artifactId>spring-boot-starter-test</artifactId> +			<scope>test</scope> +		</dependency> +		<dependency> +			<groupId>junit</groupId> +			<artifactId>junit</artifactId> +			<scope>test</scope> +		</dependency> +	</dependencies> +	<build> +		<plugins> +			<plugin> +				<groupId>org.apache.maven.plugins</groupId> +				<artifactId>maven-compiler-plugin</artifactId> +				<version>3.11.0</version> +				<configuration> +					<source>1.8</source> +					<target>1.8</target> +				</configuration> +			</plugin> +		</plugins> +	</build> +</project> diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java new file mode 100644 index 0000000000..8fb219a97b --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java @@ -0,0 +1,299 @@ +/* + * Copyright 2018-2021 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck.serviceHealthCheck; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck.transformingHealthCheck; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import org.joda.time.Duration; +import org.joda.time.ReadableDuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerCache; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.ConflictingContainerRemovingDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DefaultDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecutable; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeRunArgument; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeRunOption; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutable; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.RetryingDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.DoNothingLogCollector; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.FileLogCollector; 
+import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogDirectory; + +public class DockerComposeRule { + + public static final Duration DEFAULT_TIMEOUT = Duration.standardMinutes(2); + public static final int DEFAULT_RETRY_ATTEMPTS = 2; + private ProjectName projectName; + + private static final Logger log = LoggerFactory.getLogger(DockerComposeRule.class); + + public DockerPort hostNetworkedPort(int port) { + return new DockerPort(machine().getIp(), port, port); + } + + private DockerComposeFiles files; + private List<ClusterWait> clusterWaits; + private LogCollector logCollector; + private DockerMachine machine; + private boolean pullOnStartup; + + protected DockerComposeRule() {} + + public DockerComposeRule(DockerComposeFiles files, List<ClusterWait> clusterWaits, LogCollector logCollector, + DockerMachine machine, boolean pullOnStartup, ProjectName projectName) { + super(); + this.files = files; + this.clusterWaits = clusterWaits; + this.logCollector = logCollector; + this.machine = machine; + this.pullOnStartup = pullOnStartup; + this.projectName = projectName != null ? projectName : ProjectName.random(); + } + + public DockerComposeFiles files() { + return files; + } + + public List<ClusterWait> clusterWaits() { + return clusterWaits; + } + + public DockerMachine machine() { + return machine != null ? machine : DockerMachine.localMachine().build(); + } + + public ProjectName projectName() { + return projectName; + } + + public DockerComposeExecutable dockerComposeExecutable() { + return DockerComposeExecutable.builder() + .dockerComposeFiles(files()) + .dockerConfiguration(machine()) + .projectName(projectName()) + .build(); + } + + public DockerExecutable dockerExecutable() { + return DockerExecutable.builder() + .dockerConfiguration(machine()) + .build(); + } + + public Docker docker() { + return new Docker(dockerExecutable()); + } + + public ShutdownStrategy shutdownStrategy() { + return ShutdownStrategy.KILL_DOWN; + } + + public DockerCompose dockerCompose() { + DockerCompose dockerCompose = new DefaultDockerCompose(dockerComposeExecutable(), machine()); + return new RetryingDockerCompose(retryAttempts(), dockerCompose); + } + + public Cluster containers() { + return Cluster.builder() + .ip(machine().getIp()) + .containerCache(new ContainerCache(docker(), dockerCompose())) + .build(); + } + + protected int retryAttempts() { + return DEFAULT_RETRY_ATTEMPTS; + } + + protected boolean removeConflictingContainersOnStartup() { + return true; + } + + protected boolean pullOnStartup() { + return pullOnStartup; + } + + protected ReadableDuration nativeServiceHealthCheckTimeout() { + return DEFAULT_TIMEOUT; + } + + protected LogCollector logCollector() { + return logCollector != null ?
logCollector : new DoNothingLogCollector(); + } + + public void before() throws IOException, InterruptedException { + log.debug("Starting docker-compose cluster"); + if (pullOnStartup()) { + dockerCompose().pull(); + } + + dockerCompose().build(); + + DockerCompose upDockerCompose = dockerCompose(); + if (removeConflictingContainersOnStartup()) { + upDockerCompose = new ConflictingContainerRemovingDockerCompose(upDockerCompose, docker()); + } + upDockerCompose.up(); + + logCollector().startCollecting(dockerCompose()); + log.debug("Waiting for services"); + new ClusterWait(ClusterHealthCheck.nativeHealthChecks(), nativeServiceHealthCheckTimeout()) + .waitUntilReady(containers()); + clusterWaits().forEach(clusterWait -> clusterWait.waitUntilReady(containers())); + log.debug("docker-compose cluster started"); + } + + public void after() { + try { + shutdownStrategy().shutdown(this.dockerCompose(), this.docker()); + logCollector().stopCollecting(); + } catch (IOException | InterruptedException e) { + throw new RuntimeException("Error cleaning up docker compose cluster", e); + } + } + + public String exec(DockerComposeExecOption options, String containerName, + DockerComposeExecArgument arguments) throws IOException, InterruptedException { + return dockerCompose().exec(options, containerName, arguments); + } + + public String run(DockerComposeRunOption options, String containerName, + DockerComposeRunArgument arguments) throws IOException, InterruptedException { + return dockerCompose().run(options, containerName, arguments); + } + + public static Builder<?> builder() { + return new Builder<>(); + } + + public static class Builder<T extends Builder<T>> { + + protected DockerComposeFiles files; + protected List<ClusterWait> clusterWaits = new ArrayList<>(); + protected LogCollector logCollector; + protected DockerMachine machine; + protected boolean pullOnStartup; + protected ProjectName projectName; + + public T files(DockerComposeFiles files) { + this.files = files; + return self(); + } + + public T file(String dockerComposeYmlFile) { + return files(DockerComposeFiles.from(dockerComposeYmlFile)); + } + + /** + * Save the output of docker logs to files, stored in the path directory. + * + * See {@link LogDirectory} for some useful utilities, for example: + * {@link LogDirectory#circleAwareLogDirectory}.
+ * + * @param path directory into which log files should be saved + * @return builder for chaining + */ + public T saveLogsTo(String path) { + return logCollector(FileLogCollector.fromPath(path)); + } + + public T logCollector(LogCollector logCollector) { + this.logCollector = logCollector; + return self(); + } + + @Deprecated + public T waitingForService(String serviceName, HealthCheck<Container> healthCheck) { + return waitingForService(serviceName, healthCheck, DEFAULT_TIMEOUT); + } + + public T waitingForService(String serviceName, HealthCheck<Container> healthCheck, ReadableDuration timeout) { + ClusterHealthCheck clusterHealthCheck = serviceHealthCheck(serviceName, healthCheck); + return addClusterWait(new ClusterWait(clusterHealthCheck, timeout)); + } + + private T addClusterWait(ClusterWait clusterWait) { + clusterWaits.add(clusterWait); + return self(); + } + + public T waitingForServices(List<String> services, HealthCheck<List<Container>> healthCheck) { + return waitingForServices(services, healthCheck, DEFAULT_TIMEOUT); + } + + public T waitingForServices(List<String> services, HealthCheck<List<Container>> healthCheck, ReadableDuration timeout) { + ClusterHealthCheck clusterHealthCheck = serviceHealthCheck(services, healthCheck); + return addClusterWait(new ClusterWait(clusterHealthCheck, timeout)); + } + + public T waitingForHostNetworkedPort(int port, HealthCheck<DockerPort> healthCheck) { + return waitingForHostNetworkedPort(port, healthCheck, DEFAULT_TIMEOUT); + } + + public T waitingForHostNetworkedPort(int port, HealthCheck<DockerPort> healthCheck, ReadableDuration timeout) { + ClusterHealthCheck clusterHealthCheck = transformingHealthCheck(cluster -> new DockerPort(cluster.ip(), port, port), healthCheck); + return addClusterWait(new ClusterWait(clusterHealthCheck, timeout)); + } + + public T clusterWaits(Iterable<? extends ClusterWait> elements) { + elements.forEach(e -> clusterWaits.add(e)); + return self(); + } + + public T machine(DockerMachine machine) { + this.machine = machine; + return self(); + } + + public T pullOnStartup(boolean pullOnStartup) { + this.pullOnStartup = pullOnStartup; + return self(); + } + + public T projectName(ProjectName projectName) { + this.projectName = projectName; + return self(); + } + + @SuppressWarnings("unchecked") + final T self() { + return (T) this; + } + + public DockerComposeRule build() { + return new DockerComposeRule(files, clusterWaits, logCollector, machine, pullOnStartup, projectName); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java new file mode 100644 index 0000000000..2c9a254427 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
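
Outside JUnit, the rule's before()/after() pair makes the lifecycle the extensions automate visible. A sketch using only the API introduced above (paths and service name are illustrative):

	import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule;

	class RuleLifecycleSketch {

		void demo() throws Exception {
			DockerComposeRule rule = DockerComposeRule.builder()
					.file("src/test/resources/docker-compose-1.yml")
					.saveLogsTo("target/docker-logs")
					.pullOnStartup(true)
					.build();
			rule.before();       // pull, build, up, native health checks, then declared waits
			try {
				rule.containers().container("testservice1").state();
			}
			finally {
				rule.after();    // default KILL_DOWN shutdown, then stop log collection
			}
		}
	}
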
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.Assert; + +public final class AdditionalEnvironmentValidator { + + private static final Set<String> ILLEGAL_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_HOST, DOCKER_CERT_PATH)); + + private AdditionalEnvironmentValidator() {} + + public static Map<String, String> validate(Map<String, String> additionalEnvironment) { + HashSet<String> invalidVariables = new HashSet<>(additionalEnvironment.keySet()); + invalidVariables.retainAll(ILLEGAL_VARIABLES); + + String errorMessage = invalidVariables.stream() + .collect(Collectors.joining(", ", + "The following variables: ", + " cannot exist in your additional environment variable block as they will interfere with Docker.")); + Assert.state(invalidVariables.isEmpty(), errorMessage); + return additionalEnvironment; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java new file mode 100644 index 0000000000..1c5c0cf694 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java @@ -0,0 +1,56 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
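
A behaviour sketch for AdditionalEnvironmentValidator: ordinary variables pass through unchanged, while the connection-controlling ones are rejected (variable values illustrative):

	import java.util.HashMap;
	import java.util.Map;

	import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.AdditionalEnvironmentValidator;

	class AdditionalEnvSketch {

		void demo() {
			Map<String, String> extra = new HashMap<>();
			extra.put("COMPOSE_PROJECT_NAME", "scdf-tests");
			AdditionalEnvironmentValidator.validate(extra);     // returns the map unchanged
			extra.put("DOCKER_HOST", "tcp://example.com:2376"); // would interfere with Docker
			AdditionalEnvironmentValidator.validate(extra);     // IllegalStateException via Assert.state
		}
	}
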
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static java.util.stream.Collectors.joining; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.Assert; + +public class DaemonEnvironmentValidator implements EnvironmentValidator { + + private static final Set<String> ILLEGAL_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_HOST, DOCKER_CERT_PATH)); + + private static final DaemonEnvironmentValidator INSTANCE = new DaemonEnvironmentValidator(); + + public static DaemonEnvironmentValidator instance() { + return INSTANCE; + } + + private DaemonEnvironmentValidator() {} + + @Override + public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) { + Set<String> invalidVariables = ILLEGAL_VARIABLES.stream() + .filter(dockerEnvironment::containsKey) + .collect(Collectors.toSet()); + + String errorMessage = invalidVariables.stream() + .collect(joining(", ", + "These variables were set: ", + ". They cannot be set when connecting to a local docker daemon.")); + Assert.state(invalidVariables.isEmpty(), errorMessage); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java new file mode 100644 index 0000000000..2917b70aae --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +public class DaemonHostIpResolver implements HostIpResolver { + + public static final String LOCALHOST = "127.0.0.1"; + + @Override + public String resolveIp(String dockerHost) { + return LOCALHOST; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java new file mode 100644 index 0000000000..c0a7488a2a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java @@ -0,0 +1,95 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toList; + +import java.io.File; +import java.net.URL; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import org.springframework.util.Assert; + +public class DockerComposeFiles { + + private final List<File> dockerComposeFiles; + + public DockerComposeFiles(List<File> dockerComposeFiles) { + this.dockerComposeFiles = dockerComposeFiles; + } + + public static DockerComposeFiles from(String... dockerComposeFilenames) { + List<File> dockerComposeFiles = Arrays.asList(dockerComposeFilenames).stream() + .map(fileName -> { + Path path = null; + if (fileName.startsWith("classpath:")) { + URL resourceUrl = ClassLoader.getSystemResource(fileName.substring(10)); + if (resourceUrl == null) { + throw new IllegalArgumentException("Can't find resource " + fileName); + } + try { + path = Paths.get(resourceUrl.toURI()); + } catch (Exception e) { + throw new IllegalArgumentException("Can't find resource " + fileName, e); + } + } else { + path = Paths.get(fileName); + } + return path; + }) + .map(path -> path.toFile()) + .collect(toList()); + validateAtLeastOneComposeFileSpecified(dockerComposeFiles); + validateComposeFilesExist(dockerComposeFiles); + return new DockerComposeFiles(dockerComposeFiles); + } + + public static DockerComposeFiles fromxx(String...
dockerComposeFilenames) { + List<File> dockerComposeFiles = Arrays.asList(dockerComposeFilenames).stream() + .map(File::new) + .collect(toList()); + validateAtLeastOneComposeFileSpecified(dockerComposeFiles); + validateComposeFilesExist(dockerComposeFiles); + return new DockerComposeFiles(dockerComposeFiles); + } + + public List<String> constructComposeFileCommand() { + return dockerComposeFiles.stream() + .map(File::getAbsolutePath) + .map(f -> Arrays.asList("--file", f)) + .flatMap(Collection::stream) + .collect(toList()); + } + + private static void validateAtLeastOneComposeFileSpecified(List<File> dockerComposeFiles) { + Assert.state(!dockerComposeFiles.isEmpty(), "A docker compose file must be specified."); + } + + private static void validateComposeFilesExist(List<File> dockerComposeFiles) { + List<File> missingFiles = dockerComposeFiles.stream() + .filter(f -> !f.exists()) + .collect(toList()); + String errorMessage = missingFiles.stream() + .map(File::getAbsolutePath) + .collect(joining(", ", "The following docker-compose files: ", " do not exist.")); + Assert.state(missingFiles.isEmpty(), errorMessage); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java new file mode 100644 index 0000000000..92038f3dab --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java @@ -0,0 +1,55 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
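
DockerComposeFiles.from() resolves plain paths as well as classpath: locations, and constructComposeFileCommand() interleaves --file flags for the CLI. A sketch using the resources added elsewhere in this diff:

	import java.util.List;

	import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles;

	class ComposeFilesSketch {

		void demo() {
			DockerComposeFiles files = DockerComposeFiles.from(
					"src/test/resources/docker-compose-1.yml",
					"classpath:org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml");
			// e.g. [--file, /abs/.../docker-compose-1.yml, --file, /abs/.../docker-compose-cp1.yml]
			List<String> args = files.constructComposeFileCommand();
		}
	}
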
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Map; +import java.util.Optional; + +public enum DockerType implements HostIpResolver, EnvironmentValidator { + DAEMON(DaemonEnvironmentValidator.instance(), new DaemonHostIpResolver()), + REMOTE(RemoteEnvironmentValidator.instance(), new RemoteHostIpResolver()); + + private final EnvironmentValidator validator; + private final HostIpResolver resolver; + + DockerType(EnvironmentValidator validator, HostIpResolver resolver) { + this.validator = validator; + this.resolver = resolver; + } + + @Override + public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) { + validator.validateEnvironmentVariables(dockerEnvironment); + } + + @Override + public String resolveIp(String dockerHost) { + return resolver.resolveIp(dockerHost); + } + + public static Optional<DockerType> getFirstValidDockerTypeForEnvironment(Map<String, String> environment) { + for (DockerType currType : DockerType.values()) { + try { + currType.validateEnvironmentVariables(environment); + return Optional.of(currType); + } catch (IllegalStateException e) { + // ignore and try next type + } + } + return Optional.empty(); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java new file mode 100644 index 0000000000..e2b7137e03 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Map; + +public interface EnvironmentValidator { + + /** + * Validates that the entries in the provided map are valid for the current environment. + * The provided map represents the environment variables that should be used for the + * process, where the keys are the environment variable names and the values are the values. + * If the validator determines the state represented by the map is invalid (either because + * required values are missing or forbidden values are present), the method should throw + * an exception.
+ * + * @param dockerEnvironment A map representing the docker environment + */ + void validateEnvironmentVariables(Map<String, String> dockerEnvironment); + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java new file mode 100644 index 0000000000..141a667401 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +public interface EnvironmentVariables { + + String TCP_PROTOCOL = "tcp://"; + String DOCKER_CERT_PATH = "DOCKER_CERT_PATH"; + String DOCKER_HOST = "DOCKER_HOST"; + String DOCKER_TLS_VERIFY = "DOCKER_TLS_VERIFY"; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java new file mode 100644 index 0000000000..3136bf4388 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
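
DockerType ties the two validators together: the first type whose validation passes wins. A sketch of the selection rules (the host address is illustrative):

	import java.util.HashMap;
	import java.util.Map;

	import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType;
	import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables;

	class DockerTypeSketch {

		void demo() {
			Map<String, String> env = new HashMap<>();
			DockerType.getFirstValidDockerTypeForEnvironment(env); // Optional[DAEMON]: nothing set
			env.put(EnvironmentVariables.DOCKER_HOST, "tcp://192.168.99.100:2376");
			DockerType.getFirstValidDockerTypeForEnvironment(env); // Optional[REMOTE]: DAEMON forbids DOCKER_HOST
		}
	}
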
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +public interface HostIpResolver { + + String resolveIp(String dockerHost); + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java new file mode 100644 index 0000000000..1541717ea9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + +@Target({ElementType.PACKAGE, ElementType.TYPE}) +@interface PackageVisible {} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java new file mode 100644 index 0000000000..b7585d279d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java @@ -0,0 +1,79 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Arrays; + +import java.util.List; +import java.util.UUID; +import java.util.function.Predicate; +import java.util.regex.Pattern; + +import org.springframework.util.Assert; + +@PackageVisible +public class ProjectName { + + private String projectName; + + public ProjectName(String projectName) { + this.projectName = projectName; + validate(); + } + + protected String projectName() { + return projectName; + } + + protected void validate() { + Assert.state(projectName().trim().length() > 0, "ProjectName must not be blank."); + Assert.state(validCharacters(), + "ProjectName '" + projectName() + "' not allowed, please use lowercase letters and numbers only."); + } + + // Only allows strings that docker-compose-cli would not modify + // https://github.com/docker/compose/blob/85e2fb63b3309280a602f1f76d77d3a82e53b6c2/compose/cli/command.py#L84 + protected boolean validCharacters() { + Predicate<String> illegalCharacters = Pattern.compile("[^a-z0-9]").asPredicate(); + return !illegalCharacters.test(projectName()); + } + + public String asString() { + return projectName(); + } + + public List<String> constructComposeFileCommand() { + return Arrays.asList("--project-name", projectName()); + } + + public static ProjectName random() { + return ProjectName.of(UUID.randomUUID().toString().substring(0, 8)); + } + + /** + * A name consisting of lowercase letters and numbers only. + * + * @param name the name + * @return project name + */ + public static ProjectName fromString(String name) { + return ProjectName.of(name); + } + + private static ProjectName of(String name) { + return new ProjectName(name); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java new file mode 100644 index 0000000000..7e158b1043 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java @@ -0,0 +1,72 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
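
ProjectName's validation in practice (names illustrative):

	import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName;

	class ProjectNameSketch {

		void demo() {
			ProjectName.fromString("dataflow1");  // ok: lowercase letters and digits only
			ProjectName.random();                 // 8-character slice of a random UUID
			ProjectName.fromString("My_Project"); // IllegalStateException: illegal characters
		}
	}
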
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static java.util.stream.Collectors.joining; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +public class RemoteEnvironmentValidator implements EnvironmentValidator { + + private static final Set<String> SECURE_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_CERT_PATH)); + private static final RemoteEnvironmentValidator VALIDATOR = new RemoteEnvironmentValidator(); + + public static RemoteEnvironmentValidator instance() { + return VALIDATOR; + } + + private RemoteEnvironmentValidator() {} + + @Override + public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) { + Collection<String> missingVariables = getMissingEnvVariables(dockerEnvironment); + String errorMessage = missingVariables.stream() + .collect(joining(", ", + "Missing required environment variables: ", + ". Please run `docker-machine env <machine-name>` and " + + "ensure they are set on the DockerComposition.")); + + Assert.state(missingVariables.isEmpty(), errorMessage); + } + + private static Collection<String> getMissingEnvVariables(Map<String, String> dockerEnvironment) { + Collection<String> requiredVariables = new HashSet<>(Arrays.asList(DOCKER_HOST)); + requiredVariables.addAll(secureVariablesRequired(dockerEnvironment)); + return requiredVariables.stream() + .filter(envVariable -> !StringUtils.hasText(dockerEnvironment.get(envVariable))) + .collect(Collectors.toSet()); + } + + private static Set<String> secureVariablesRequired(Map<String, String> dockerEnvironment) { + return certVerificationEnabled(dockerEnvironment) ? SECURE_VARIABLES : new HashSet<>(); + } + + private static boolean certVerificationEnabled(Map<String, String> dockerEnvironment) { + return dockerEnvironment.containsKey(DOCKER_TLS_VERIFY); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolver.java new file mode 100644 index 0000000000..242b677187 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolver.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
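
RemoteEnvironmentValidator's two-step requirement, sketched (addresses illustrative): DOCKER_HOST is always mandatory, and switching on TLS verification additionally demands DOCKER_CERT_PATH.

	import java.util.HashMap;
	import java.util.Map;

	import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteEnvironmentValidator;

	class RemoteEnvSketch {

		void demo() {
			Map<String, String> env = new HashMap<>();
			env.put("DOCKER_HOST", "tcp://192.168.99.100:2376");
			RemoteEnvironmentValidator.instance().validateEnvironmentVariables(env); // passes
			env.put("DOCKER_TLS_VERIFY", "1");
			RemoteEnvironmentValidator.instance().validateEnvironmentVariables(env); // fails: DOCKER_CERT_PATH missing
		}
	}
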
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.TCP_PROTOCOL; + +import java.util.Optional; +import org.apache.commons.lang3.StringUtils; + +public class RemoteHostIpResolver implements HostIpResolver { + + @Override + public String resolveIp(String dockerHost) { + return Optional.ofNullable(org.springframework.util.StringUtils.hasText(dockerHost) ? dockerHost : null) + .map(host -> StringUtils.substringAfter(host, TCP_PROTOCOL)) + .map(ipAndMaybePort -> StringUtils.substringBefore(ipAndMaybePort, ":")) + .orElseThrow(() -> new IllegalArgumentException("DOCKER_HOST cannot be blank/null")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ShutdownStrategy.java new file mode 100644 index 0000000000..9c5f44c979 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ShutdownStrategy.java @@ -0,0 +1,72 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.io.IOException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.AggressiveShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.AggressiveShutdownWithNetworkCleanupStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.GracefulShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.KillDownShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.SkipShutdownStrategy; + +/** + * How should a cluster of containers be shut down by the `after` method of + * DockerComposeRule. + */ +public interface ShutdownStrategy { + + /** + * Call rm on all containers, working around btrfs bug on CircleCI. + * + * @deprecated Use {@link #KILL_DOWN} (the default strategy) + */ + @Deprecated + ShutdownStrategy AGGRESSIVE = new AggressiveShutdownStrategy(); + /** + * Call rm on all containers, then call docker-compose down. 
+ * + * @deprecated Use {@link #KILL_DOWN} (the default strategy) + */ + @Deprecated + ShutdownStrategy AGGRESSIVE_WITH_NETWORK_CLEANUP = new AggressiveShutdownWithNetworkCleanupStrategy(); + /** + * Call docker-compose down, kill, then rm. Allows containers up to 10 seconds to shut down + * gracefully. + * + * <p>With this strategy, you will need to take care not to accidentally write images + * that ignore their down signal, for instance by putting their run command in as a + * string (which is interpreted by a SIGTERM-ignoring bash) rather than an array of strings. + */ + ShutdownStrategy GRACEFUL = new GracefulShutdownStrategy(); + /** + * Call docker-compose kill then down. + */ + ShutdownStrategy KILL_DOWN = new KillDownShutdownStrategy(); + /** + * Skip shutdown, leaving containers running after tests finish executing. + * + * <p>You can use this option to speed up repeated test execution locally by leaving + * images up between runs. Do not commit it! You will be potentially abandoning + * long-running processes and leaking resources on your CI platform! + */ + ShutdownStrategy SKIP = new SkipShutdownStrategy(); + + void shutdown(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Cluster.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Cluster.java new file mode 100644 index 0000000000..26b9509a50 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Cluster.java @@ -0,0 +1,79 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static java.util.stream.Collectors.toList; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +public class Cluster { + + private final String ip; + private final ContainerCache containerCache; + + public Cluster(String ip, ContainerCache containerCache) { + this.ip = ip; + this.containerCache = containerCache; + } + + public String ip() { + return ip; + } + + public ContainerCache containerCache() { + return containerCache; + } + + public Container container(String name) { + return containerCache().container(name); + } + + public List<Container> containers(List<String> containerNames) { + return containerNames.stream() + .map(this::container) + .collect(toList()); + } + + public Set<Container> allContainers() throws IOException, InterruptedException { + return containerCache().containers(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private String ip; + private ContainerCache containerCache; + + public Builder ip(String ip) { + this.ip = ip; + return this; + } + + public Builder containerCache(ContainerCache containerCache) { + this.containerCache = containerCache; + return this; + } + + public Cluster build() { + return new Cluster(ip, containerCache); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Container.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Container.java new file mode 100644 index 0000000000..5db61da3dd --- /dev/null +++
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Container.java @@ -0,0 +1,178 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class Container { + + private static final Logger log = LoggerFactory.getLogger(Container.class); + + private final String containerName; + private final Docker docker; + private final DockerCompose dockerCompose; + + private Supplier<Ports> portMappings = memoize(() -> this.getDockerPorts()); + + public static <T> Supplier<T> memoize(Supplier<T> original) { + return new Supplier<T>() { + Supplier<T> delegate = this::firstTime; + boolean initialized; + + public T get() { + return delegate.get(); + } + + private synchronized T firstTime() { + if (!initialized) { + T value = original.get(); + delegate = () -> value; + initialized = true; + } + return delegate.get(); + } + }; + } + + public Container(String containerName, Docker docker, DockerCompose dockerCompose) { + this.containerName = containerName; + this.docker = docker; + this.dockerCompose = dockerCompose; + } + + public String getContainerName() { + return containerName; + } + + public SuccessOrFailure portIsListeningOnHttpAndCheckStatus2xx(int internalPort, Function<DockerPort, String> urlFunction) { + return portIsListeningOnHttp(internalPort, urlFunction, true); + } + + public SuccessOrFailure portIsListeningOnHttp(int internalPort, Function<DockerPort, String> urlFunction) { + return portIsListeningOnHttp(internalPort, urlFunction, false); + } + + public SuccessOrFailure portIsListeningOnHttp(int internalPort, Function<DockerPort, String> urlFunction, boolean andCheckStatus) { + try { + DockerPort port = port(internalPort); + if (!port.isListeningNow()) { + return SuccessOrFailure.failure("Internal port " + internalPort + " is not listening in container " + containerName); + } + return port.isHttpRespondingSuccessfully(urlFunction, andCheckStatus) + .mapFailure(failureMessage -> internalPort + " does not have a http response from " + urlFunction.apply(port) + ":\n" + failureMessage); + } catch (Exception e) { + return SuccessOrFailure.fromException(e); + } + } + + public DockerPort portMappedExternallyTo(int externalPort) { + return portMappings.get() + .stream() + .filter(port -> port.getExternalPort() == externalPort) + .findFirst() + .orElseThrow(() -> new
IllegalArgumentException("No port mapped externally to '" + externalPort + "' for container '" + containerName + "'")); + } + + public DockerPort port(int internalPort) { + return portMappings.get() + .stream() + .filter(port -> port.getInternalPort() == internalPort) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No internal port '" + internalPort + "' for container '" + containerName + "': " + portMappings)); + } + + public void start() throws IOException, InterruptedException { + dockerCompose.start(this); + portMappings = memoize(() -> this.getDockerPorts()); + } + + public void stop() throws IOException, InterruptedException { + dockerCompose.stop(this); + } + + public void kill() throws IOException, InterruptedException { + dockerCompose.kill(this); + } + + public State state() throws IOException, InterruptedException { + String id = dockerCompose.id(this).orElse(null); + if (id == null) { + return State.DOWN; + } + return docker.state(id); + } + + public void up() throws IOException, InterruptedException { + dockerCompose.up(this); + } + + public Ports ports() { + return portMappings.get(); + } + + private Ports getDockerPorts() { + try { + return dockerCompose.ports(containerName); + } catch (IOException | InterruptedException e) { + throw new RuntimeException(e); + } + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + Container container = (Container) object; + return Objects.equals(containerName, container.containerName); + } + + @Override + public int hashCode() { + return Objects.hash(containerName); + } + + @Override + public String toString() { + return "Container{containerName='" + containerName + "'}"; + } + + public SuccessOrFailure areAllPortsOpen() { + List unavaliablePorts = portMappings.get().stream() + .filter(port -> !port.isListeningNow()) + .map(DockerPort::getInternalPort) + .collect(Collectors.toList()); + + boolean allPortsOpen = unavaliablePorts.isEmpty(); + String failureMessage = "The following ports failed to open: " + unavaliablePorts; + + return SuccessOrFailure.fromBoolean(allPortsOpen, failureMessage); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCache.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCache.java new file mode 100644 index 0000000000..305da85b72 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCache.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static java.util.stream.Collectors.toSet; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class ContainerCache { + + private final Map containers = new HashMap<>(); + private final Docker docker; + private final DockerCompose dockerCompose; + + public ContainerCache(Docker docker, DockerCompose dockerCompose) { + this.docker = docker; + this.dockerCompose = dockerCompose; + } + + public Container container(String containerName) { + containers.putIfAbsent(containerName, new Container(containerName, docker, dockerCompose)); + return containers.get(containerName); + } + + public Set containers() throws IOException, InterruptedException { + return dockerCompose.services().stream().map(this::container).collect(toSet()); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerName.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerName.java new file mode 100644 index 0000000000..d87c715010 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerName.java @@ -0,0 +1,135 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static java.util.stream.Collectors.joining; + +import java.util.Arrays; + +public class ContainerName { + + private String rawName; + private String semanticName; + + public ContainerName(String rawName, String semanticName) { + this.rawName = rawName; + this.semanticName = semanticName; + } + + public String rawName() { + return rawName; + } + + public String semanticName() { + return semanticName; + } + + @Override + public String toString() { + return semanticName(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((rawName == null) ? 0 : rawName.hashCode()); + result = prime * result + ((semanticName == null) ? 
0 : semanticName.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ContainerName other = (ContainerName) obj; + if (rawName == null) { + if (other.rawName != null) + return false; + } else if (!rawName.equals(other.rawName)) + return false; + if (semanticName == null) { + if (other.semanticName != null) + return false; + } else if (!semanticName.equals(other.semanticName)) + return false; + return true; + } + + public static ContainerName fromPsLine(String psLine) { + String[] lineComponents = psLine.split(" "); + String rawName = lineComponents[0]; + + if (probablyCustomName(rawName)) { + return ContainerName.builder() + .rawName(rawName) + .semanticName(rawName) + .build(); + } + + String semanticName = withoutDirectory(withoutScaleNumber(rawName)); + return ContainerName.builder() + .rawName(rawName) + .semanticName(semanticName) + .build(); + } + + private static boolean probablyCustomName(String rawName) { + return !(rawName.split("_").length >= 3); + } + + private static String withoutDirectory(String rawName) { + return Arrays.stream(rawName.split("_")) + .skip(1) + .collect(joining("_")); + } + + public static String withoutScaleNumber(String rawName) { + String[] components = rawName.split("_"); + return Arrays.stream(components) + .limit(components.length - 1) + .collect(joining("_")); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private String rawName; + private String semanticName; + + public Builder rawName(String rawName) { + this.rawName = rawName; + return this; + } + + public Builder semanticName(String semanticName) { + this.semanticName = semanticName; + return this; + } + + public ContainerName build() { + return new ContainerName(rawName, semanticName); + } + + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNames.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNames.java new file mode 100644 index 0000000000..83f557249c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNames.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
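To make the name mangling above concrete, a hedged example of what fromPsLine produces for a default docker-compose name (the project directory prefix "mydir" is a placeholder):

    ContainerName name = ContainerName.fromPsLine("mydir_db_1   docker-entrypoint.sh   Up");
    name.rawName();      // "mydir_db_1"
    name.semanticName(); // "db" -- directory prefix and scale number stripped

Names with fewer than three underscore-separated components are treated as custom names and left untouched.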
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.toList; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +public class ContainerNames { + + private ContainerNames() {} + + public static List parseFromDockerComposePs(String psOutput) { + String[] psHeadAndBody = psOutput.split("-+(\r|\n)+"); + if (psHeadAndBody.length < 2) { + return emptyList(); + } + + String psBody = psHeadAndBody[1]; + return psBodyLines(psBody) + .map(ContainerName::fromPsLine) + .collect(toList()); + } + + private static Stream psBodyLines(String psBody) { + String[] lines = psBody.split("(\r|\n)+"); + return Arrays.stream(lines) + .map(String::trim) + .filter(line -> !line.isEmpty()); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerMachine.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerMachine.java new file mode 100644 index 0000000000..0fb6c4ff94 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerMachine.java @@ -0,0 +1,162 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
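A sketch of the ps-output parsing above, using a fabricated docker-compose ps listing; only the header/separator/body shape matters:

    String psOutput =
          "    Name               Command         State   Ports\n"
        + "------------------------------------------------------\n"
        + "mydir_db_1   docker-entrypoint.sh   Up      5432/tcp\n";
    List<ContainerName> names = ContainerNames.parseFromDockerComposePs(psOutput);
    // one entry: rawName "mydir_db_1", semanticName "db"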
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.AdditionalEnvironmentValidator; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteHostIpResolver; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerConfiguration; + +public class DockerMachine implements DockerConfiguration { + + private static final Logger log = LoggerFactory.getLogger(DockerMachine.class); + private static final DockerType FALLBACK_DOCKER_TYPE = DockerType.DAEMON; + + private final String hostIp; + private final Map environment; + + public DockerMachine(String hostIp, Map environment) { + this.hostIp = hostIp; + this.environment = environment; + } + + public String getIp() { + return hostIp; + } + + @Override + public ProcessBuilder configuredDockerComposeProcess() { + ProcessBuilder process = new ProcessBuilder(); + augmentGivenEnvironment(process.environment()); + return process; + } + + private void augmentGivenEnvironment(Map environmentToAugment) { + environmentToAugment.putAll(environment); + } + + public static LocalBuilder localMachine() { + Map systemEnv = System.getenv(); + Optional dockerType = DockerType.getFirstValidDockerTypeForEnvironment(systemEnv); + if (!dockerType.isPresent()) { + log.debug( + "Failed to determine Docker type (daemon or remote) based on current environment. " + + "Proceeding with {} as the type.", FALLBACK_DOCKER_TYPE); + } + + return new LocalBuilder(dockerType.orElse(FALLBACK_DOCKER_TYPE), systemEnv); + } + + public static LocalBuilder localMachine(DockerType dockerType) { + return new LocalBuilder(dockerType, System.getenv()); + } + + public static class LocalBuilder { + + private final DockerType dockerType; + private final Map systemEnvironment; + private Map additionalEnvironment = new HashMap<>(); + + LocalBuilder(DockerType dockerType, Map systemEnvironment) { + this.dockerType = dockerType; + this.systemEnvironment = new HashMap<>(systemEnvironment); + } + + public LocalBuilder withAdditionalEnvironmentVariable(String key, String value) { + additionalEnvironment.put(key, value); + return this; + } + + public LocalBuilder withEnvironment(Map newEnvironment) { + this.additionalEnvironment = new HashMap<>(newEnvironment != null ? 
newEnvironment : new HashMap<>()); + return this; + } + + public DockerMachine build() { + dockerType.validateEnvironmentVariables(systemEnvironment); + AdditionalEnvironmentValidator.validate(additionalEnvironment); + Map combinedEnvironment = new HashMap<>(); + combinedEnvironment.putAll(systemEnvironment); + combinedEnvironment.putAll(additionalEnvironment); + + String dockerHost = systemEnvironment.getOrDefault(DOCKER_HOST, ""); + return new DockerMachine(dockerType.resolveIp(dockerHost), new HashMap<>(combinedEnvironment)); + } + } + + public static RemoteBuilder remoteMachine() { + return new RemoteBuilder(); + } + + public static class RemoteBuilder { + + private final Map dockerEnvironment = new HashMap<>(); + private Map additionalEnvironment = new HashMap<>(); + + private RemoteBuilder() {} + + public RemoteBuilder host(String hostname) { + dockerEnvironment.put(DOCKER_HOST, hostname); + return this; + } + + public RemoteBuilder withTLS(String certPath) { + dockerEnvironment.put(DOCKER_TLS_VERIFY, "1"); + dockerEnvironment.put(DOCKER_CERT_PATH, certPath); + return this; + } + + public RemoteBuilder withoutTLS() { + dockerEnvironment.remove(DOCKER_TLS_VERIFY); + dockerEnvironment.remove(DOCKER_CERT_PATH); + return this; + } + + public RemoteBuilder withAdditionalEnvironmentVariable(String key, String value) { + additionalEnvironment.put(key, value); + return this; + } + + public RemoteBuilder withEnvironment(Map newEnvironment) { + this.additionalEnvironment = new HashMap<>(newEnvironment != null ? newEnvironment : new HashMap<>()); + return this; + } + + public DockerMachine build() { + DockerType.REMOTE.validateEnvironmentVariables(dockerEnvironment); + AdditionalEnvironmentValidator.validate(additionalEnvironment); + + String dockerHost = dockerEnvironment.getOrDefault(DOCKER_HOST, ""); + String hostIp = new RemoteHostIpResolver().resolveIp(dockerHost); + + Map environment = new HashMap<>(); + environment.putAll(dockerEnvironment); + environment.putAll(additionalEnvironment); + return new DockerMachine(hostIp, environment); + } + + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPort.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPort.java new file mode 100644 index 0000000000..6acecf1278 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPort.java @@ -0,0 +1,147 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
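For orientation, the two builder entry points above might be used as follows; the host, certificate path, and extra variable are placeholders:

    DockerMachine local = DockerMachine.localMachine()
            .withAdditionalEnvironmentVariable("COMPOSE_PROJECT_NAME", "tests")
            .build();

    DockerMachine remote = DockerMachine.remoteMachine()
            .host("tcp://192.168.99.100:2376")
            .withTLS("/path/to/certs")
            .build();

The resulting environment is handed to every docker-compose process via configuredDockerComposeProcess().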
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.MalformedURLException; +import java.net.Socket; +import java.net.SocketException; +import java.net.URL; +import java.util.Objects; +import java.util.function.Function; +import javax.net.ssl.SSLHandshakeException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; + +public class DockerPort { + + private static final Logger log = LoggerFactory.getLogger(DockerPort.class); + + private final String ip; + private final PortMapping portMapping; + + public DockerPort(String ip, int externalPort, int internalPort) { + this(ip, new PortMapping(externalPort, internalPort)); + } + + public DockerPort(String ip, PortMapping portMapping) { + this.ip = ip; + this.portMapping = portMapping; + } + + public String getIp() { + return ip; + } + + public int getExternalPort() { + return portMapping.getExternalPort(); + } + + public int getInternalPort() { + return portMapping.getInternalPort(); + } + + public boolean isListeningNow() { + try (Socket socket = new Socket()) { + socket.connect(new InetSocketAddress(ip, getExternalPort()), 500); + log.trace("External Port '{}' on ip '{}' was open", getExternalPort(), ip); + return true; + } catch (IOException e) { + return false; + } + } + + public boolean isHttpResponding(Function urlFunction, boolean andCheckStatus) { + return isHttpRespondingSuccessfully(urlFunction, andCheckStatus).succeeded(); + } + + public SuccessOrFailure isHttpRespondingSuccessfully(Function urlFunction, boolean andCheckStatus) { + URL url; + try { + String urlString = urlFunction.apply(this); + log.trace("Trying to connect to {}", urlString); + url = new URL(urlString); + } catch (MalformedURLException e) { + throw new RuntimeException("Could not create URL for connecting to localhost", e); + } + try { + url.openConnection().connect(); + url.openStream().read(); + log.debug("Http connection acquired, assuming port active"); + return SuccessOrFailure.success(); + } catch (SocketException e) { + return SuccessOrFailure.failureWithCondensedException("Failed to acquire http connection, assuming port inactive", e); + } catch (FileNotFoundException e) { + return SuccessOrFailure.fromBoolean(!andCheckStatus, "Received 404, assuming port inactive: " + e.getMessage()); + } catch (SSLHandshakeException e) { + return SuccessOrFailure.failureWithCondensedException("Received bad SSL response, assuming port inactive", e); + } catch (IOException e) { + return SuccessOrFailure.failureWithCondensedException("Error acquiring http connection, assuming port open but inactive", e); + } + } + + /** + * Formats the docker port into a particular form. + *
<p>
+	 * Example: dockerPort.inFormat("https://$HOST:$EXTERNAL_PORT/api")
+	 * <p>
+	 * Available options are:
+	 * <ul>
+	 * <li>$HOST - the hostname/ip address of the docker port</li>
+	 * <li>$EXTERNAL_PORT - the external version of the docker port</li>
+	 * <li>$INTERNAL_PORT - the internal version of the docker port</li>
+	 * </ul>
+ * + * @param format a format string using the substitutions listed above + * @return formattedDockerPort the details of the {@link DockerPort} in the specified format + */ + public String inFormat(String format) { + return format + .replaceAll("\\$HOST", getIp()) + .replaceAll("\\$EXTERNAL_PORT", String.valueOf(getExternalPort())) + .replaceAll("\\$INTERNAL_PORT", String.valueOf(getInternalPort())); + + } + + @Override + public int hashCode() { + return Objects.hash(ip, portMapping); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DockerPort other = (DockerPort) obj; + return Objects.equals(ip, other.ip) + && Objects.equals(portMapping, other.portMapping); + } + + @Override + public String toString() { + return "DockerPort [ip=" + ip + ", portMapping=" + portMapping + "]"; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortMapping.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortMapping.java new file mode 100644 index 0000000000..c906212031 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortMapping.java @@ -0,0 +1,65 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
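A quick illustration of inFormat with hypothetical values:

    DockerPort port = new DockerPort("localhost", 32768, 8080);
    String url = port.inFormat("https://$HOST:$EXTERNAL_PORT/api");
    // url -> "https://localhost:32768/api"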
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.Objects; + +public class PortMapping { + + private final int externalPort; + private final int internalPort; + + public PortMapping(int externalPort, int internalPort) { + this.externalPort = externalPort; + this.internalPort = internalPort; + } + + public int getExternalPort() { + return externalPort; + } + + public int getInternalPort() { + return internalPort; + } + + @Override + public int hashCode() { + return Objects.hash(externalPort, internalPort); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + PortMapping other = (PortMapping) obj; + return Objects.equals(externalPort, other.externalPort) + && Objects.equals(internalPort, other.internalPort); + } + + @Override + public String toString() { + return "PortMapping [externalPort=" + externalPort + ", internalPort=" + + internalPort + "]"; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Ports.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Ports.java new file mode 100644 index 0000000000..90b6185120 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Ports.java @@ -0,0 +1,92 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +public class Ports { + + private static final Pattern PORT_PATTERN = Pattern.compile("((\\d+).(\\d+).(\\d+).(\\d+)):(\\d+)->(\\d+)/tcp"); + private static final int IP_ADDRESS = 1; + private static final int EXTERNAL_PORT = 6; + private static final int INTERNAL_PORT = 7; + + private static final String NO_IP_ADDRESS = "0.0.0.0"; + + private final List ports; + + public Ports(List ports) { + this.ports = ports; + } + + public Ports(DockerPort port) { + this(Collections.singletonList(port)); + } + + public Stream stream() { + return ports.stream(); + } + + public static Ports parseFromDockerComposePs(String psOutput, String dockerMachineIp) { + Assert.state(StringUtils.hasText(psOutput), "No container found"); + Matcher matcher = PORT_PATTERN.matcher(psOutput); + List ports = new ArrayList<>(); + while (matcher.find()) { + String matchedIpAddress = matcher.group(IP_ADDRESS); + String ip = matchedIpAddress.equals(NO_IP_ADDRESS) ? dockerMachineIp : matchedIpAddress; + int externalPort = Integer.parseInt(matcher.group(EXTERNAL_PORT)); + int internalPort = Integer.parseInt(matcher.group(INTERNAL_PORT)); + + ports.add(new DockerPort(ip, externalPort, internalPort)); + } + return new Ports(ports); + } + + @Override + public int hashCode() { + return Objects.hash(ports); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Ports other = (Ports) obj; + return Objects.equals(ports, other.ports); + } + + @Override + public String toString() { + return "Ports [ports=" + ports + "]"; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/State.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/State.java new file mode 100644 index 0000000000..f247c949c6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/State.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +public enum State { + DOWN, PAUSED, UNHEALTHY, HEALTHY; + + /** + * Returns true if the container is up, unpaused and healthy. 
+ * + * @return true if the container is up, unpaused and healthy + */ + public boolean isHealthy() { + return this == HEALTHY; + } + + /** + * Returns true if the container is up but not necessarily unpaused or healthy. + * + * @return true if the container is up but not necessarily unpaused or healthy + */ + public boolean isUp() { + return this != DOWN; + } + + /** + * Returns true if the container is paused. + * + * @return true if the container is paused + */ + public boolean isPaused() { + return this == PAUSED; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Attempt.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Attempt.java new file mode 100644 index 0000000000..315e93b761 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Attempt.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +@FunctionalInterface +public interface Attempt { + boolean attempt() throws Exception; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterHealthCheck.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterHealthCheck.java new file mode 100644 index 0000000000..a01277bba1 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterHealthCheck.java @@ -0,0 +1,76 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
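Circling back to Ports.parseFromDockerComposePs above, a sketch of how the 0.0.0.0 wildcard address is substituted with the docker machine IP (the IP shown is a placeholder):

    Ports ports = Ports.parseFromDockerComposePs(
            "0.0.0.0:32768->8080/tcp", "192.168.99.100");
    // one DockerPort with ip=192.168.99.100, externalPort=32768, internalPort=8080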
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import static java.util.stream.Collectors.joining; + +import java.io.IOException; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Function; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.State; + +@FunctionalInterface +public interface ClusterHealthCheck { + static ClusterHealthCheck serviceHealthCheck(List containerNames, HealthCheck> delegate) { + return transformingHealthCheck(cluster -> cluster.containers(containerNames), delegate); + } + + static ClusterHealthCheck serviceHealthCheck(String containerName, HealthCheck containerCheck) { + return transformingHealthCheck(cluster -> cluster.container(containerName), containerCheck); + } + + static ClusterHealthCheck transformingHealthCheck(Function transform, HealthCheck healthCheck) { + return cluster -> { + T target = transform.apply(cluster); + return healthCheck.isHealthy(target); + }; + } + + /** + * Returns a check that the native "healthcheck" status of the docker containers is not unhealthy. + * + *
<p>
Does not wait for DOWN or PAUSED containers, or containers with no healthcheck defined. + * + * @return native health checks + */ + static ClusterHealthCheck nativeHealthChecks() { + return cluster -> { + Set unhealthyContainers = new LinkedHashSet<>(); + try { + for (Container container : cluster.allContainers()) { + State state = container.state(); + if (state == State.UNHEALTHY) { + unhealthyContainers.add(container.getContainerName()); + } + } + if (!unhealthyContainers.isEmpty()) { + return SuccessOrFailure.failure( + "The following containers are not healthy: " + unhealthyContainers.stream().collect(joining(", "))); + } + return SuccessOrFailure.success(); + } catch (IOException e) { + return SuccessOrFailure.fromException(e); + } + }; + } + + SuccessOrFailure isClusterHealthy(Cluster cluster) throws InterruptedException; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWait.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWait.java new file mode 100644 index 0000000000..2f141c6545 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWait.java @@ -0,0 +1,73 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
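A hedged sketch of composing the cluster checks above; the service name "web" is hypothetical:

    ClusterHealthCheck allPorts = ClusterHealthCheck.serviceHealthCheck(
            "web", container -> container.areAllPortsOpen());
    ClusterHealthCheck nativeChecks = ClusterHealthCheck.nativeHealthChecks();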
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import com.jayway.awaitility.Awaitility; +import com.jayway.awaitility.core.ConditionTimeoutException; + +import java.util.Optional; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import org.joda.time.ReadableDuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; + +public class ClusterWait { + private static final Logger log = LoggerFactory.getLogger(ClusterWait.class); + private final ClusterHealthCheck clusterHealthCheck; + private final ReadableDuration timeout; + + public ClusterWait(ClusterHealthCheck clusterHealthCheck, ReadableDuration timeout) { + this.clusterHealthCheck = clusterHealthCheck; + this.timeout = timeout; + } + + public void waitUntilReady(Cluster cluster) { + final AtomicReference> lastSuccessOrFailure = new AtomicReference<>( + Optional.empty()); + + log.info("Waiting for cluster to be healthy"); + try { + Awaitility.await() + .pollInterval(50, TimeUnit.MILLISECONDS) + .atMost(timeout.getMillis(), TimeUnit.MILLISECONDS) + .until(weHaveSuccess(cluster, lastSuccessOrFailure)); + } catch (ConditionTimeoutException e) { + throw new IllegalStateException(serviceDidNotStartupExceptionMessage(lastSuccessOrFailure)); + } + } + + private Callable weHaveSuccess(Cluster cluster, + AtomicReference> lastSuccessOrFailure) { + return () -> { + SuccessOrFailure successOrFailure = clusterHealthCheck.isClusterHealthy(cluster); + lastSuccessOrFailure.set(Optional.of(successOrFailure)); + return successOrFailure.succeeded(); + }; + } + + private static String serviceDidNotStartupExceptionMessage( + AtomicReference> lastSuccessOrFailure) { + String healthcheckFailureMessage = lastSuccessOrFailure.get() + .flatMap(SuccessOrFailure::toOptionalFailureMessage) + .orElse("The healthcheck did not finish before the timeout"); + + return "The cluster failed to pass a startup check: " + healthcheckFailureMessage; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Exceptions.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Exceptions.java new file mode 100644 index 0000000000..0b18084b08 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Exceptions.java @@ -0,0 +1,29 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
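ClusterWait above pairs one of these checks with a joda-time timeout; a minimal usage sketch, assuming a Cluster named cluster:

    ClusterWait wait = new ClusterWait(
            ClusterHealthCheck.nativeHealthChecks(),
            org.joda.time.Duration.standardMinutes(2));
    wait.waitUntilReady(cluster); // polls every 50ms; IllegalStateException on timeout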
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.stream.Collectors; +import org.apache.commons.lang3.exception.ExceptionUtils; + +public enum Exceptions { + ; + + public static String condensedStacktraceFor(Throwable throwable) { + return ExceptionUtils.getThrowableList(throwable).stream() + .map(t -> t.getClass().getCanonicalName() + ": " + t.getMessage()) + .collect(Collectors.joining("\n")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthCheck.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthCheck.java new file mode 100644 index 0000000000..ac2e5b14b0 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthCheck.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +@FunctionalInterface +public interface HealthCheck { + SuccessOrFailure isHealthy(T target); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthChecks.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthChecks.java new file mode 100644 index 0000000000..ddaa06c031 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthChecks.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.function.Function; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; + +public final class HealthChecks { + + private HealthChecks() {} + + public static HealthCheck toRespondOverHttp(int internalPort, Function urlFunction) { + return container -> container.portIsListeningOnHttp(internalPort, urlFunction); + } + + public static HealthCheck toRespond2xxOverHttp(int internalPort, Function urlFunction) { + return container -> container.portIsListeningOnHttpAndCheckStatus2xx(internalPort, urlFunction); + } + + public static HealthCheck toHaveAllPortsOpen() { + return Container::areAllPortsOpen; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailure.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailure.java new file mode 100644 index 0000000000..9d376fa1fc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailure.java @@ -0,0 +1,93 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
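The factories above are typically handed to a service health check or wait rule; for example (the /health path is a placeholder):

    HealthCheck<Container> overHttp = HealthChecks.toRespond2xxOverHttp(
            8080, port -> port.inFormat("http://$HOST:$EXTERNAL_PORT/health"));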
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.Optional; +import java.util.function.Function; +import org.apache.commons.lang3.exception.ExceptionUtils; + +public class SuccessOrFailure { + + private Optional optionalFailureMessage; + + public SuccessOrFailure(Optional optionalFailureMessage) { + this.optionalFailureMessage = optionalFailureMessage; + } + + public static SuccessOrFailure onResultOf(Attempt attempt) { + try { + return fromBoolean(attempt.attempt(), "Attempt to complete healthcheck failed"); + } catch (Exception e) { + return fromException(e); + } + } + + public SuccessOrFailure mapFailure(Function mapper) { + if (this.succeeded()) { + return this; + } else { + return failure(mapper.apply(failureMessage())); + } + } + + protected Optional optionalFailureMessage() { + return optionalFailureMessage; + } + + public static SuccessOrFailure success() { + return SuccessOrFailure.of(Optional.empty()); + } + + private static SuccessOrFailure of(Optional empty) { + return new SuccessOrFailure(empty); + } + + public static SuccessOrFailure failure(String message) { + return SuccessOrFailure.of(Optional.of(message)); + } + + public static SuccessOrFailure failureWithCondensedException(String message, Exception exception) { + return failure(message + ":\n" + Exceptions.condensedStacktraceFor(exception)); + } + + public static SuccessOrFailure fromBoolean(boolean succeeded, String possibleFailureMessage) { + if (succeeded) { + return success(); + } else { + return failure(possibleFailureMessage); + } + } + + public boolean failed() { + return optionalFailureMessage().isPresent(); + } + + public boolean succeeded() { + return !failed(); + } + + public String failureMessage() { + return optionalFailureMessage().get(); + } + + public Optional toOptionalFailureMessage() { + return optionalFailureMessage(); + } + + public static SuccessOrFailure fromException(Exception exception) { + return SuccessOrFailure.failure("Encountered an exception: " + ExceptionUtils.getStackTrace(exception)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/AggressiveShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/AggressiveShutdownStrategy.java new file mode 100644 index 0000000000..f8443d08a6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/AggressiveShutdownStrategy.java @@ -0,0 +1,77 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
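A brief sketch of the SuccessOrFailure combinators above; checkSomething() is a stand-in for any boolean probe:

    SuccessOrFailure result = SuccessOrFailure
            .onResultOf(() -> checkSomething())         // a thrown exception becomes a failure
            .mapFailure(msg -> "probe failed: " + msg); // rewrites only the failure case
    if (result.failed()) {
        System.err.println(result.failureMessage());
    }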
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import static java.util.stream.Collectors.toList; + +import java.io.IOException; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; + +/** + * Shuts down containers as fast as possible, without giving them time to finish + * IO or clean up any resources. + * + * @deprecated Use {@link ShutdownStrategy#KILL_DOWN} + */ +@Deprecated +public class AggressiveShutdownStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(AggressiveShutdownStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException { + List runningContainers = dockerCompose.ps(); + + log.info("Shutting down {}", runningContainers.stream().map(ContainerName::semanticName).collect(toList())); + if (removeContainersCatchingErrors(docker, runningContainers)) { + return; + } + + log.debug("First shutdown attempted failed due to btrfs volume error... retrying"); + if (removeContainersCatchingErrors(docker, runningContainers)) { + return; + } + + log.warn("Couldn't shut down containers due to btrfs volume error, " + + "see https://circleci.com/docs/docker-btrfs-error/ for more info."); + + log.info("Pruning networks"); + docker.pruneNetworks(); + } + + private static boolean removeContainersCatchingErrors(Docker docker, List runningContainers) throws IOException, InterruptedException { + try { + removeContainers(docker, runningContainers); + return true; + } catch (DockerExecutionException exception) { + return false; + } + } + + private static void removeContainers(Docker docker, List running) throws IOException, InterruptedException { + List rawContainerNames = running.stream() + .map(ContainerName::rawName) + .collect(toList()); + + docker.rm(rawContainerNames); + log.debug("Finished shutdown"); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/AggressiveShutdownWithNetworkCleanupStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/AggressiveShutdownWithNetworkCleanupStrategy.java new file mode 100644 index 0000000000..6d680aac72 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/AggressiveShutdownWithNetworkCleanupStrategy.java @@ -0,0 +1,68 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import static java.util.stream.Collectors.toList; + +import java.io.IOException; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; + +/** + * Shuts down containers as fast as possible while cleaning up any networks that were created. + * + * @deprecated Use {@link ShutdownStrategy#KILL_DOWN} + */ +@Deprecated +public class AggressiveShutdownWithNetworkCleanupStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(AggressiveShutdownWithNetworkCleanupStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException { + List runningContainers = dockerCompose.ps(); + + log.info("Shutting down {}", runningContainers.stream().map(ContainerName::semanticName).collect(toList())); + removeContainersCatchingErrors(docker, runningContainers); + removeNetworks(dockerCompose, docker); + + } + + private static void removeContainersCatchingErrors(Docker docker, List runningContainers) throws IOException, InterruptedException { + try { + removeContainers(docker, runningContainers); + } catch (DockerExecutionException exception) { + log.error("Error while trying to remove containers: {}", exception.getMessage()); + } + } + + private static void removeContainers(Docker docker, List running) throws IOException, InterruptedException { + List rawContainerNames = running.stream() + .map(ContainerName::rawName) + .collect(toList()); + + docker.rm(rawContainerNames); + log.debug("Finished shutdown"); + } + + private static void removeNetworks(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException { + dockerCompose.down(); + docker.pruneNetworks(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Command.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Command.java new file mode 100644 index 0000000000..f3836ea946 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Command.java @@ -0,0 +1,108 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.concurrent.Executors.newSingleThreadExecutor; +import static java.util.stream.Collectors.joining; + +public class Command { + + public static final int HOURS_TO_WAIT_FOR_STD_OUT_TO_CLOSE = 12; + + public static final int MINUTES_TO_WAIT_AFTER_STD_OUT_CLOSES = 1; + + private final Executable executable; + + private final Consumer logConsumer; + + public Command(Executable executable, Consumer logConsumer) { + this.executable = executable; + this.logConsumer = logConsumer; + } + + public String execute(ErrorHandler errorHandler, String... commands) throws IOException, InterruptedException { + ProcessResult result = run(commands); + + if (result.exitCode() != 0) { + errorHandler.handle(result.exitCode(), result.output(), executable.commandName(), commands); + } + + return result.output(); + } + + public static ErrorHandler throwingOnError() { + return (exitCode, output, commandName, commands) -> { + String message = + constructNonZeroExitErrorMessage(exitCode, commandName, commands) + "\nThe output was:\n" + output; + throw new DockerExecutionException(message); + }; + } + + private static String constructNonZeroExitErrorMessage(int exitCode, String commandName, String... commands) { + return "'" + commandName + " " + Arrays.stream(commands).collect(joining(" ")) + "' returned exit code " + + exitCode; + } + + private ProcessResult run(String... 
+		Process process = executable.execute(commands);
+
+		ExecutorService exec = newSingleThreadExecutor();
+		Future<String> outputProcessing = exec
+				.submit(() -> processOutputFrom(process));
+
+		String output = waitForResultFrom(outputProcessing);
+
+		process.waitFor(MINUTES_TO_WAIT_AFTER_STD_OUT_CLOSES, TimeUnit.MINUTES);
+		exec.shutdown();
+
+		return new ProcessResult(process.exitValue(), output);
+	}
+
+	private String processOutputFrom(Process process) {
+		return asReader(process.getInputStream()).lines()
+				.peek(logConsumer)
+				.collect(joining(System.lineSeparator()));
+	}
+
+	private static String waitForResultFrom(Future<String> outputProcessing) {
+		try {
+			return outputProcessing.get(HOURS_TO_WAIT_FOR_STD_OUT_TO_CLOSE, TimeUnit.HOURS);
+		} catch (InterruptedException | ExecutionException | TimeoutException e) {
+			throw new RuntimeException(e);
+		}
+	}
+
+	private static BufferedReader asReader(InputStream inputStream) {
+		return new BufferedReader(new InputStreamReader(inputStream, UTF_8));
+	}
+}
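Editor's note: a small usage sketch for Command. The executable variable is a placeholder for any Executable (for example the DockerComposeExecutable introduced later in this change); throwingOnError() converts a non-zero exit code into a DockerExecutionException:

    Command command = new Command(executable, line -> System.out.println(line));
    String services = command.execute(Command.throwingOnError(), "config", "--services");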
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerCompose.java
new file mode 100644
index 0000000000..30511b5118
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerCompose.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.util.Assert;
+
+public class ConflictingContainerRemovingDockerCompose extends DelegatingDockerCompose {
+	private static final Logger log = LoggerFactory.getLogger(ConflictingContainerRemovingDockerCompose.class);
+	private static final Pattern NAME_CONFLICT_PATTERN = Pattern.compile("name \"([^\"]*)\" is already in use");
+
+	private final Docker docker;
+	private final int retryAttempts;
+
+	public ConflictingContainerRemovingDockerCompose(DockerCompose dockerCompose, Docker docker) {
+		this(dockerCompose, docker, 1);
+	}
+
+	public ConflictingContainerRemovingDockerCompose(DockerCompose dockerCompose, Docker docker, int retryAttempts) {
+		super(dockerCompose);
+
+		Assert.state(retryAttempts >= 1, "retryAttempts must be at least 1, was " + retryAttempts);
+		this.docker = docker;
+		this.retryAttempts = retryAttempts;
+	}
+
+	@Override
+	public void up() throws IOException, InterruptedException {
+		for (int currRetryAttempt = 0; currRetryAttempt <= retryAttempts; currRetryAttempt++) {
+			try {
+				getDockerCompose().up();
+				return;
+			} catch (DockerExecutionException e) {
+				Set<String> conflictingContainerNames = getConflictingContainerNames(e.getMessage());
+				if (conflictingContainerNames.isEmpty()) {
+					// failed due to a reason other than conflicting containers, so re-throw
+					throw e;
+				}
+
+				log.debug("docker-compose up failed due to container name conflicts (container names: {}). "
+						+ "Removing containers and attempting docker-compose up again (attempt {}).",
+						conflictingContainerNames, currRetryAttempt + 1);
+				removeContainers(conflictingContainerNames);
+			}
+		}
+
+		throw new DockerExecutionException("docker-compose up failed");
+	}
+
+	private void removeContainers(Collection<String> containerNames) throws IOException, InterruptedException {
+		try {
+			docker.rm(containerNames);
+		} catch (DockerExecutionException e) {
+			// there are cases, such as in CircleCI, where 'docker rm' returns a non-zero exit code and "fails",
+			// but the container is still effectively removed as far as conflict resolution is concerned. Because
+			// of this, be permissive and do not fail the task even if 'rm' fails.
+			log.debug("docker rm failed, but continuing execution", e);
+		}
+	}
+
+	Set<String> getConflictingContainerNames(String output) {
+		Set<String> set = new HashSet<>();
+		Matcher matcher = NAME_CONFLICT_PATTERN.matcher(output);
+		while (matcher.find()) {
+			set.add(matcher.group(1));
+		}
+		return set;
+	}
+
+}
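Editor's note: a sketch of the decorator above in use; delegate and docker are placeholders. With retryAttempts of 3, an up() that fails on container name conflicts force-removes the conflicting containers and retries up to three times:

    DockerCompose compose = new ConflictingContainerRemovingDockerCompose(delegate, docker, 3);
    compose.up();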
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DefaultDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DefaultDockerCompose.java
new file mode 100644
index 0000000000..5d4a5e875e
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DefaultDockerCompose.java
@@ -0,0 +1,257 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+import static org.apache.commons.lang3.Validate.validState;
+import static org.joda.time.Duration.standardMinutes;
+
+import com.github.zafarkhaja.semver.Version;
+import com.jayway.awaitility.Awaitility;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+import org.apache.commons.io.IOUtils;
+import org.joda.time.Duration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerNames;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports;
+import org.springframework.util.StringUtils;
+
+public class DefaultDockerCompose implements DockerCompose {
+
+	public static final Version VERSION_1_7_0 = Version.valueOf("1.7.0");
+	private static final Duration COMMAND_TIMEOUT = standardMinutes(2);
+	private static final Duration LOG_WAIT_TIMEOUT = standardMinutes(30);
+	private static final Logger log = LoggerFactory.getLogger(DefaultDockerCompose.class);
+
+	private final Command command;
+	private final DockerMachine dockerMachine;
+	private final DockerComposeExecutable rawExecutable;
+
+	public DefaultDockerCompose(DockerComposeFiles dockerComposeFiles, DockerMachine dockerMachine, ProjectName projectName) {
+		this(DockerComposeExecutable.builder()
+				.dockerComposeFiles(dockerComposeFiles)
+				.dockerConfiguration(dockerMachine)
+				.projectName(projectName)
+				.build(), dockerMachine);
+	}
+
+	public DefaultDockerCompose(DockerComposeExecutable rawExecutable, DockerMachine dockerMachine) {
+		this.rawExecutable = rawExecutable;
+		this.command = new Command(rawExecutable, log::debug);
+		this.dockerMachine = dockerMachine;
+	}
+
+	@Override
+	public void pull() throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "pull");
+	}
+
+	@Override
+	public void build() throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "build");
+	}
+
+	@Override
+	public void up() throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "up", "-d");
+	}
+
+	@Override
+	public void down() throws IOException, InterruptedException {
+		command.execute(swallowingDownCommandDoesNotExist(), "down", "--volumes");
+	}
+
+	@Override
+	public void kill() throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "kill");
+	}
+
+	@Override
+	public void rm() throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "rm", "--force", "-v");
+	}
+
+	@Override
+	public void up(Container container) throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "up", "-d", container.getContainerName());
+	}
+
+	@Override
+	public void start(Container container) throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "start", container.getContainerName());
+	}
+
+	@Override
+	public void stop(Container container) throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "stop", container.getContainerName());
+	}
+
+	@Override
+	public void kill(Container container) throws IOException, InterruptedException {
+		command.execute(Command.throwingOnError(), "kill", container.getContainerName());
+	}
+
+	@Override
+	public String exec(DockerComposeExecOption dockerComposeExecOption, String containerName,
+			DockerComposeExecArgument dockerComposeExecArgument) throws IOException, InterruptedException {
+		verifyDockerComposeVersionAtLeast(VERSION_1_7_0, "You need at least docker-compose 1.7 to run docker-compose exec");
+		String[] fullArgs = constructFullDockerComposeExecArguments(dockerComposeExecOption, containerName, dockerComposeExecArgument);
+		return command.execute(Command.throwingOnError(), fullArgs);
+	}
+
+	@Override
+	public String run(DockerComposeRunOption dockerComposeRunOption, String containerName,
+			DockerComposeRunArgument dockerComposeRunArgument) throws IOException, InterruptedException {
+		String[] fullArgs = constructFullDockerComposeRunArguments(dockerComposeRunOption, containerName, dockerComposeRunArgument);
+		return command.execute(Command.throwingOnError(), fullArgs);
+	}
+
+	private void verifyDockerComposeVersionAtLeast(Version targetVersion, String message) throws IOException, InterruptedException {
+		validState(version().greaterThanOrEqualTo(targetVersion), message);
+	}
+
+	private Version version() throws IOException, InterruptedException {
+		String versionOutput = command.execute(Command.throwingOnError(), "-v");
+		return DockerComposeVersion.parseFromDockerComposeVersion(versionOutput);
+	}
+
+	private static String[] constructFullDockerComposeExecArguments(DockerComposeExecOption dockerComposeExecOption,
+			String containerName, DockerComposeExecArgument dockerComposeExecArgument) {
+		// The "-T" option disables pseudo-TTY allocation, which is not needed here since we do not use
+		// terminal features (for example, sending ^C to kill the executed process).
+		// Disabling pseudo-TTY allocation also means this works on OSes that don't support TTY (e.g. Windows)
+		List<String> fullArgs = new ArrayList<>();
+		fullArgs.add("exec");
+		fullArgs.add("-T");
+		fullArgs.addAll(dockerComposeExecOption.options());
+		fullArgs.add(containerName);
+		fullArgs.addAll(dockerComposeExecArgument.arguments());
+		return fullArgs.toArray(new String[fullArgs.size()]);
+	}
+
+	private static String[] constructFullDockerComposeRunArguments(DockerComposeRunOption dockerComposeRunOption,
+			String containerName, DockerComposeRunArgument dockerComposeRunArgument) {
+		List<String> fullArgs = new ArrayList<>();
+		fullArgs.add("run");
+		fullArgs.addAll(dockerComposeRunOption.options());
+		fullArgs.add(containerName);
+		fullArgs.addAll(dockerComposeRunArgument.arguments());
+		return fullArgs.toArray(new String[fullArgs.size()]);
+	}
+
+	@Override
+	public List<ContainerName> ps() throws IOException, InterruptedException {
+		String psOutput = command.execute(Command.throwingOnError(), "ps");
+		return ContainerNames.parseFromDockerComposePs(psOutput);
+	}
+
+	@Override
+	public Optional<String> id(Container container) throws IOException, InterruptedException {
+		return id(container.getContainerName());
+	}
+
+	@Override
+	public String config() throws IOException, InterruptedException {
+		return command.execute(Command.throwingOnError(), "config");
+	}
+
+	@Override
+	public List<String> services() throws IOException, InterruptedException {
+		String servicesOutput = command.execute(Command.throwingOnError(), "config", "--services");
+		return Arrays.asList(servicesOutput.split("(\r|\n)+"));
+	}
+
+	/**
+	 * Blocks until all logs have been collected from the container.
+	 * @return whether the docker container terminated prior to log collection ending
+	 */
+	@Override
+	public boolean writeLogs(String container, OutputStream output) throws IOException {
+		try {
+			Awaitility.await()
+					.pollInterval(50, TimeUnit.MILLISECONDS)
+					.atMost(LOG_WAIT_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)
+					.until(() -> exists(container));
+			Process executedProcess = followLogs(container);
+			IOUtils.copy(executedProcess.getInputStream(), output);
+			executedProcess.waitFor(COMMAND_TIMEOUT.getMillis(), MILLISECONDS);
+		} catch (InterruptedException e) {
+			return false;
+		}
+		return true;
+	}
+
+	private boolean exists(final String containerName) throws IOException, InterruptedException {
+		return id(containerName).isPresent();
+	}
+
+	private Optional<String> id(String containerName) throws IOException, InterruptedException {
+		String id = command.execute(Command.throwingOnError(), "ps", "-q", containerName);
+		if (id.isEmpty()) {
+			return Optional.empty();
+		}
+		return Optional.of(id);
+	}
+
+	private Process followLogs(String container) throws IOException, InterruptedException {
+		if (version().greaterThanOrEqualTo(VERSION_1_7_0)) {
+			return rawExecutable.execute("logs", "--no-color", "--follow", container);
+		}
+
+		return rawExecutable.execute("logs", "--no-color", container);
+	}
+
+	@Override
+	public Ports ports(String service) throws IOException, InterruptedException {
+		return Ports.parseFromDockerComposePs(psOutput(service), dockerMachine.getIp());
+	}
+
+	private static ErrorHandler swallowingDownCommandDoesNotExist() {
+		return (exitCode, output, commandName, commands) -> {
+			if (downCommandWasPresent(output)) {
+				Command.throwingOnError().handle(exitCode, output, commandName, commands);
+			}
+
+			log.warn("It looks like `docker-compose down` didn't work.");
+			log.warn("This probably means your version of docker-compose doesn't support the `down` command");
+			log.warn("Updating to version 1.6+ of docker-compose is likely to fix this issue.");
+		};
+	}
+
+	private static boolean downCommandWasPresent(String output) {
+		return !output.contains("No such command");
+	}
+
+	private String psOutput(String service) throws IOException, InterruptedException {
+		String psOutput = command.execute(Command.throwingOnError(), "ps", service);
+		validState(StringUtils.hasText(psOutput), "No container with name '" + service + "' found");
+		return psOutput;
+	}
+}
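Editor's note: a construction sketch for DefaultDockerCompose. The compose file path and service name are hypothetical, and DockerComposeFiles.from(...) is assumed from the configuration package:

    DockerCompose compose = new DefaultDockerCompose(
            DockerComposeFiles.from("src/test/resources/docker-compose.yml"), // hypothetical path
            DockerMachine.localMachine().build(),
            ProjectName.random());
    compose.up();
    compose.writeLogs("dataflow-server", System.out); // hypothetical service name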
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DelegatingDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DelegatingDockerCompose.java
new file mode 100644
index 0000000000..122ca51357
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DelegatingDockerCompose.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+import java.util.Optional;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports;
+
+abstract class DelegatingDockerCompose implements DockerCompose {
+	private final DockerCompose dockerCompose;
+
+	protected DelegatingDockerCompose(DockerCompose dockerCompose) {
+		this.dockerCompose = dockerCompose;
+	}
+
+	@Override
+	public void pull() throws IOException, InterruptedException {
+		dockerCompose.pull();
+	}
+
+	@Override
+	public void build() throws IOException, InterruptedException {
+		dockerCompose.build();
+	}
+
+	@Override
+	public void up() throws IOException, InterruptedException {
+		dockerCompose.up();
+	}
+
+	@Override
+	public void down() throws IOException, InterruptedException {
+		dockerCompose.down();
+	}
+
+	@Override
+	public void kill() throws IOException, InterruptedException {
+		dockerCompose.kill();
+	}
+
+	@Override
+	public void rm() throws IOException, InterruptedException {
+		dockerCompose.rm();
+	}
+
+	@Override
+	public void up(Container container) throws IOException, InterruptedException {
+		dockerCompose.up(container);
+	}
+
+	@Override
+	public void start(Container container) throws IOException, InterruptedException {
+		dockerCompose.start(container);
+	}
+
+	@Override
+	public void stop(Container container) throws IOException, InterruptedException {
+		dockerCompose.stop(container);
+	}
+
+	@Override
+	public void kill(Container container) throws IOException, InterruptedException {
+		dockerCompose.kill(container);
+	}
+
+	@Override
+	public String exec(DockerComposeExecOption dockerComposeExecOption, String containerName,
+			DockerComposeExecArgument dockerComposeExecArgument) throws IOException, InterruptedException {
+		return dockerCompose.exec(dockerComposeExecOption, containerName, dockerComposeExecArgument);
+	}
+
+	@Override
+	public String run(DockerComposeRunOption dockerComposeRunOption, String containerName,
+			DockerComposeRunArgument dockerComposeRunArgument) throws IOException, InterruptedException {
+		return dockerCompose.run(dockerComposeRunOption, containerName, dockerComposeRunArgument);
+	}
+
+	@Override
+	public List<ContainerName> ps() throws IOException, InterruptedException {
+		return dockerCompose.ps();
+	}
+
+	@Override
+	public Optional<String> id(Container container) throws IOException, InterruptedException {
+		return dockerCompose.id(container);
+	}
+
+	@Override
+	public String config() throws IOException, InterruptedException {
+		return dockerCompose.config();
+	}
+
+	@Override
+	public List<String> services() throws IOException, InterruptedException {
+		return dockerCompose.services();
+	}
+
+	@Override
+	public boolean writeLogs(String container, OutputStream output) throws IOException {
+		return dockerCompose.writeLogs(container, output);
+	}
+
+	@Override
+	public Ports ports(String service) throws IOException, InterruptedException {
+		return dockerCompose.ports(service);
+	}
+
+	protected final DockerCompose getDockerCompose() {
+		return dockerCompose;
+	}
+
+}
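Editor's note: DelegatingDockerCompose is the decorator base that ConflictingContainerRemovingDockerCompose above builds on; a subclass (necessarily in the same package, since the class is package-private) overrides only the calls it wants to intercept. A sketch:

    class LoggingDockerCompose extends DelegatingDockerCompose {
        LoggingDockerCompose(DockerCompose delegate) {
            super(delegate);
        }

        @Override
        public void up() throws IOException, InterruptedException {
            System.out.println("docker-compose up starting");
            super.up();
        }
    }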
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Docker.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Docker.java
new file mode 100644
index 0000000000..9ece053f53
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Docker.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import com.github.zafarkhaja.semver.Version;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.apache.commons.lang3.SystemUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.State;
+import org.springframework.util.Assert;
+
+public class Docker {
+
+	private static final Logger log = LoggerFactory.getLogger(Docker.class);
+
+	// Without java escape characters: ^(\d+)\.(\d+)\.(\d+)(?:-.*)?$
+	private static final Pattern VERSION_PATTERN = Pattern.compile("^Docker version (\\d+)\\.(\\d+)\\.(\\d+)(?:-.*)?$");
+	private static final String HEALTH_STATUS_FORMAT = "--format="
+			+ "{{if not .State.Running}}DOWN"
+			+ "{{else if .State.Paused}}PAUSED"
+			+ "{{else if index .State \"Health\"}}"
+			+ "{{if eq .State.Health.Status \"healthy\"}}HEALTHY"
+			+ "{{else}}UNHEALTHY{{end}}"
+			+ "{{else}}HEALTHY{{end}}";
+	private static final String HEALTH_STATUS_FORMAT_WINDOWS = HEALTH_STATUS_FORMAT.replaceAll("\"", "`\"");
+
+	public static Version version() throws IOException, InterruptedException {
+		return new Docker(DockerExecutable.builder().dockerConfiguration(DockerMachine.localMachine().build()).build())
+				.configuredVersion();
+	}
+
+	public Version configuredVersion() throws IOException, InterruptedException {
+		String versionString = command.execute(Command.throwingOnError(), "-v");
+		Matcher matcher = VERSION_PATTERN.matcher(versionString);
+		Assert.state(matcher.matches(), "Unexpected output of docker -v: " + versionString);
+		return Version.forIntegers(Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2)),
+				Integer.parseInt(matcher.group(3)));
+	}
+
+	private final Command command;
+
+	public Docker(DockerExecutable rawExecutable) {
+		this.command = new Command(rawExecutable, log::trace);
+	}
+
+	public State state(String containerId) throws IOException, InterruptedException {
+		String formatString = SystemUtils.IS_OS_WINDOWS ? HEALTH_STATUS_FORMAT_WINDOWS : HEALTH_STATUS_FORMAT;
+		String stateString = command.execute(Command.throwingOnError(), "inspect", formatString, containerId);
+		return State.valueOf(stateString);
+	}
+
+	public void rm(Collection<String> containerNames) throws IOException, InterruptedException {
+		rm(containerNames.toArray(new String[containerNames.size()]));
+	}
+
+	public void rm(String... containerNames) throws IOException, InterruptedException {
+		List<String> commands = new ArrayList<>();
+		commands.add("rm");
+		commands.add("-f");
+		if (containerNames != null) {
+			for (String containerName : containerNames) {
+				commands.add(containerName);
+			}
+		}
+		command.execute(Command.throwingOnError(), commands.toArray(new String[0]));
+	}
+
+	public String listNetworks() throws IOException, InterruptedException {
+		return command.execute(Command.throwingOnError(), "network", "ls");
+	}
+
+	public String pruneNetworks() throws IOException, InterruptedException {
+		return command.execute(Command.throwingOnError(), "network", "prune", "--force");
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocations.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocations.java
new file mode 100644
index 0000000000..c090d77c1e
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocations.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import static java.util.Arrays.asList;
+
+import java.io.File;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Predicate;
+
+public class DockerCommandLocations {
+	private static final Predicate<String> IS_NOT_NULL = path -> path != null;
+	private static final Predicate<String> FILE_EXISTS = path -> new File(path).exists();
+
+	private final List<String> possiblePaths;
+
+	public DockerCommandLocations(String... possiblePaths) {
+		this.possiblePaths = asList(possiblePaths);
+	}
+
+	public Optional<String> preferredLocation() {
+		return possiblePaths.stream()
+				.filter(IS_NOT_NULL)
+				.filter(FILE_EXISTS)
+				.findFirst();
+	}
+
+	@Override
+	public String toString() {
+		return "DockerCommandLocations{possiblePaths=" + possiblePaths + "}";
+	}
+}
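Editor's note: DockerCommandLocations simply returns the first existing path from a null-tolerant candidate list; this mirrors how DockerExecutable (later in this change) resolves the docker binary:

    DockerCommandLocations locations = new DockerCommandLocations(
            System.getenv("DOCKER_LOCATION"), // may be null; null entries are filtered out
            "/usr/local/bin/docker",
            "/usr/bin/docker");
    String path = locations.preferredLocation()
            .orElseThrow(() -> new IllegalStateException("Could not find docker, looked in: " + locations));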
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCompose.java
new file mode 100644
index 0000000000..1a3059e837
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCompose.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import com.github.zafarkhaja.semver.Version;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+import java.util.Optional;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports;
+
+public interface DockerCompose {
+	static Version version() throws IOException, InterruptedException {
+		return DockerComposeExecutable.version();
+	}
+	void pull() throws IOException, InterruptedException;
+	void build() throws IOException, InterruptedException;
+	void up() throws IOException, InterruptedException;
+	void down() throws IOException, InterruptedException;
+	void kill() throws IOException, InterruptedException;
+	void rm() throws IOException, InterruptedException;
+	void up(Container container) throws IOException, InterruptedException;
+	void start(Container container) throws IOException, InterruptedException;
+	void stop(Container container) throws IOException, InterruptedException;
+	void kill(Container container) throws IOException, InterruptedException;
+	String exec(DockerComposeExecOption dockerComposeExecOption, String containerName, DockerComposeExecArgument dockerComposeExecArgument) throws IOException, InterruptedException;
+	String run(DockerComposeRunOption dockerComposeRunOption, String containerName, DockerComposeRunArgument dockerComposeRunArgument) throws IOException, InterruptedException;
+	List<ContainerName> ps() throws IOException, InterruptedException;
+	Optional<String> id(Container container) throws IOException, InterruptedException;
+	String config() throws IOException, InterruptedException;
+	List<String> services() throws IOException, InterruptedException;
+	boolean writeLogs(String container, OutputStream output) throws IOException;
+	Ports ports(String service) throws IOException, InterruptedException;
+}
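Editor's note: the static version() hook is what DefaultDockerCompose uses to gate exec on docker-compose >= 1.7.0; a caller can run the same check up front. A sketch (Version.lessThan comes from the java-semver library used throughout):

    Version version = DockerCompose.version();
    if (version.lessThan(DefaultDockerCompose.VERSION_1_7_0)) {
        throw new IllegalStateException("docker-compose exec needs at least 1.7.0, found " + version);
    }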
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecArgument.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecArgument.java
new file mode 100644
index 0000000000..b24369ca9a
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecArgument.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class DockerComposeExecArgument {
+
+	private List<String> arguments;
+
+	public DockerComposeExecArgument(List<String> arguments) {
+		this.arguments = arguments;
+	}
+
+	public List<String> arguments() {
+		return arguments;
+	}
+
+	public static DockerComposeExecArgument of(List<String> arguments) {
+		return new DockerComposeExecArgument(arguments);
+	}
+
+	public static DockerComposeExecArgument arguments(String... arguments) {
+		return DockerComposeExecArgument.of(Arrays.asList(arguments));
+	}
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + ((arguments == null) ? 0 : arguments.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		DockerComposeExecArgument other = (DockerComposeExecArgument) obj;
+		if (arguments == null) {
+			if (other.arguments != null)
+				return false;
+		} else if (!arguments.equals(other.arguments))
+			return false;
+		return true;
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOption.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOption.java
new file mode 100644
index 0000000000..61940d6b58
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOption.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public class DockerComposeExecOption {
+
+	private List<String> options;
+
+	public DockerComposeExecOption(List<String> options) {
+		this.options = options;
+	}
+
+	public List<String> options() {
+		return options;
+	}
+
+	public static DockerComposeExecOption options(String... options) {
+		return DockerComposeExecOption.of(Arrays.asList(options));
+	}
+
+	private static DockerComposeExecOption of(List<String> asList) {
+		return new DockerComposeExecOption(asList);
+	}
+
+	public static DockerComposeExecOption noOptions() {
+		return DockerComposeExecOption.of(Collections.emptyList());
+	}
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + ((options == null) ? 0 : options.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		DockerComposeExecOption other = (DockerComposeExecOption) obj;
+		if (options == null) {
+			if (other.options != null)
+				return false;
+		} else if (!options.equals(other.options))
+			return false;
+		return true;
+	}
+
+}
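Editor's note: DockerComposeExecOption and the DockerComposeExecArgument class above are the two halves of an exec call; compose and the service name are placeholders:

    String output = compose.exec(
            DockerComposeExecOption.noOptions(),
            "dataflow-server", // hypothetical service name
            DockerComposeExecArgument.arguments("sh", "-c", "echo ready"));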
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecutable.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecutable.java
new file mode 100644
index 0000000000..f8e0f895f8
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecutable.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import com.github.zafarkhaja.semver.Version;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName;
+
+public class DockerComposeExecutable implements Executable {
+
+	private static final Logger log = LoggerFactory.getLogger(DockerComposeExecutable.class);
+
+	private static final DockerCommandLocations DOCKER_COMPOSE_LOCATIONS = new DockerCommandLocations(
+			System.getenv("DOCKER_COMPOSE_LOCATION"),
+			"/usr/local/bin/docker-compose",
+			"/usr/bin/docker-compose",
+			"/usr/local/bin/docker",
+			"/usr/bin/docker"
+	);
+
+	private static String defaultDockerComposePath() {
+		String pathToUse = DOCKER_COMPOSE_LOCATIONS.preferredLocation()
+				.orElseThrow(() -> new IllegalStateException(
+						"Could not find docker-compose, looked in: " + DOCKER_COMPOSE_LOCATIONS));
+
+		log.debug("Using docker-compose found at " + pathToUse);
+
+		return pathToUse;
+	}
+
+	static Version version() throws IOException, InterruptedException {
+		Command dockerCompose = new Command(new Executable() {
+
+			@Override
+			public String commandName() {
+				File file = new File(defaultDockerComposePath());
+				return file.getName().equals("docker-compose") ? "docker-compose" : "docker";
+			}
+
+			@Override
+			public Process execute(String... commands) throws IOException {
+				List<String> args = new ArrayList<>();
+				String dockerComposePath = defaultDockerComposePath();
+				args.add(dockerComposePath);
+				if (commandName().equals("docker")) {
+					args.add("compose");
+				}
+				args.addAll(Arrays.asList(commands));
+				log.debug("execute:{}", args);
+				return new ProcessBuilder(args).redirectErrorStream(true).start();
+			}
+		}, log::debug);
+
+		String versionOutput = dockerCompose.execute(Command.throwingOnError(), "-v");
+		return DockerComposeVersion.parseFromDockerComposeVersion(versionOutput);
+	}
+
+	private DockerComposeFiles dockerComposeFiles;
+
+	private DockerConfiguration dockerConfiguration;
+
+	private ProjectName projectName = ProjectName.random();
+
+	public DockerComposeExecutable(DockerComposeFiles dockerComposeFiles, DockerConfiguration dockerConfiguration, ProjectName projectName) {
+		this.dockerComposeFiles = dockerComposeFiles;
+		this.dockerConfiguration = dockerConfiguration;
+		if (projectName != null) {
+			this.projectName = projectName;
+		}
+	}
+
+	public DockerComposeFiles dockerComposeFiles() {
+		return dockerComposeFiles;
+	}
+
+	public DockerConfiguration dockerConfiguration() {
+		return dockerConfiguration;
+	}
+
+	public ProjectName projectName() {
+		return projectName;
+	}
+
+	@Override
+	public final String commandName() {
+		File file = new File(defaultDockerComposePath());
+		return file.getName().equals("docker-compose") ? "docker-compose" : "docker";
+	}
+
+	protected String dockerComposePath() {
+		return defaultDockerComposePath();
+	}
+
+	@Override
+	public Process execute(String... commands) throws IOException {
+		DockerForMacHostsIssue.issueWarning();
+
+		List<String> args = new ArrayList<>();
+		String dockerComposePath = dockerComposePath();
+		args.add(dockerComposePath);
+		if (commandName().equals("docker")) {
+			args.add("compose");
+		}
+		// If only a single option starting with '-' is provided, skip the project and compose-file arguments.
+		if (commands.length > 1 || commands[0].charAt(0) != '-') {
+			args.addAll(projectName().constructComposeFileCommand());
+			args.addAll(dockerComposeFiles().constructComposeFileCommand());
+		}
+		args.addAll(Arrays.asList(commands));
+
+		log.debug("execute:{}", args);
+		return dockerConfiguration().configuredDockerComposeProcess()
+				.command(args)
+				.redirectErrorStream(true)
+				.start();
+	}
+
+	public static Builder builder() {
+		return new Builder();
+	}
+
+	public static class Builder {
+		private DockerComposeFiles dockerComposeFiles;
+
+		private DockerConfiguration dockerConfiguration;
+
+		private ProjectName projectName;
+
+		public Builder dockerComposeFiles(DockerComposeFiles dockerComposeFiles) {
+			this.dockerComposeFiles = dockerComposeFiles;
+			return this;
+		}
+
+		public Builder dockerConfiguration(DockerConfiguration dockerConfiguration) {
+			this.dockerConfiguration = dockerConfiguration;
+			return this;
+		}
+
+		public Builder projectName(ProjectName projectName) {
+			this.projectName = projectName;
+			return this;
+		}
+
+		public DockerComposeExecutable build() {
+			return new DockerComposeExecutable(dockerComposeFiles, dockerConfiguration, projectName);
+		}
+	}
+}
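Editor's note: a builder sketch for DockerComposeExecutable; the compose file path is hypothetical, and a DockerMachine serves as the DockerConfiguration, exactly as DefaultDockerCompose wires it:

    DockerComposeExecutable executable = DockerComposeExecutable.builder()
            .dockerComposeFiles(DockerComposeFiles.from("docker-compose.yml")) // hypothetical file
            .dockerConfiguration(DockerMachine.localMachine().build())
            .projectName(ProjectName.random())
            .build();
    Process process = executable.execute("config", "--services");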
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunArgument.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunArgument.java
new file mode 100644
index 0000000000..41f12899e6
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunArgument.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class DockerComposeRunArgument {
+
+	private List<String> arguments;
+
+	public DockerComposeRunArgument(List<String> arguments) {
+		this.arguments = arguments;
+	}
+
+	public List<String> arguments() {
+		return arguments;
+	}
+
+	public static DockerComposeRunArgument arguments(String... arguments) {
+		return DockerComposeRunArgument.of(Arrays.asList(arguments));
+	}
+
+	private static DockerComposeRunArgument of(List<String> asList) {
+		return new DockerComposeRunArgument(asList);
+	}
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + ((arguments == null) ? 0 : arguments.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		DockerComposeRunArgument other = (DockerComposeRunArgument) obj;
+		if (arguments == null) {
+			if (other.arguments != null)
+				return false;
+		} else if (!arguments.equals(other.arguments))
+			return false;
+		return true;
+	}
+}
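Editor's note: the run counterpart to the exec sketch earlier; DockerComposeRunOption follows immediately below, and compose and the service name are placeholders:

    String output = compose.run(
            DockerComposeRunOption.options("--rm"),
            "dataflow-app", // hypothetical service name
            DockerComposeRunArgument.arguments("--version"));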
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunOption.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunOption.java
new file mode 100644
index 0000000000..31280c2f8a
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunOption.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class DockerComposeRunOption {
+
+	private List<String> options;
+
+	public DockerComposeRunOption(List<String> options) {
+		this.options = options;
+	}
+
+	public List<String> options() {
+		return options;
+	}
+
+	public static DockerComposeRunOption options(String... options) {
+		return DockerComposeRunOption.of(Arrays.asList(options));
+	}
+
+	private static DockerComposeRunOption of(List<String> asList) {
+		return new DockerComposeRunOption(asList);
+	}
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + ((options == null) ? 0 : options.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		DockerComposeRunOption other = (DockerComposeRunOption) obj;
+		if (options == null) {
+			if (other.options != null)
+				return false;
+		} else if (!options.equals(other.options))
+			return false;
+		return true;
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersion.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersion.java
new file mode 100644
index 0000000000..52c715c890
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersion.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import com.github.zafarkhaja.semver.Version;
+
+public final class DockerComposeVersion {
+
+	private DockerComposeVersion() {
+	}
+
+	// docker-compose version output can look like 1.7.0rc1, which java-semver can't parse;
+	// only the numeric 1.7.0 prefix is passed on to java-semver
+	public static Version parseFromDockerComposeVersion(String versionOutput) {
+		String[] splitOnSeparator = versionOutput.split(" ");
+		String version = null;
+		for (String value : splitOnSeparator) {
+			if (Character.isDigit(value.charAt(0))) {
+				version = value;
+				break;
+			} else if (value.charAt(0) == 'v' && value.length() > 1 && Character.isDigit(value.charAt(1))) {
+				version = value.substring(1);
+			}
+		}
+		if (version == null) {
+			throw new DockerExecutionException("Unable to parse a version from: " + versionOutput);
+		}
+		StringBuilder builder = new StringBuilder();
+		for (int i = 0; i < version.length(); i++) {
+			if (version.charAt(i) >= '0' && version.charAt(i) <= '9' || version.charAt(i) == '.') {
+				builder.append(version.charAt(i));
+			} else {
+				return Version.valueOf(builder.toString());
+			}
+		}
+		return Version.valueOf(builder.toString());
+	}
+}
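Editor's note: two illustrative inputs for the parser above, covering the classic and the v2-style CLI banners (the strings are examples, not captured output):

    // "docker-compose version 1.29.2, build 5becea4c" -> 1.29.2
    Version v1 = DockerComposeVersion.parseFromDockerComposeVersion("docker-compose version 1.29.2, build 5becea4c");
    // "Docker Compose version v2.24.5" -> 2.24.5 (the leading 'v' is stripped)
    Version v2 = DockerComposeVersion.parseFromDockerComposeVersion("Docker Compose version v2.24.5");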
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerConfiguration.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerConfiguration.java
new file mode 100644
index 0000000000..24bd548fee
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerConfiguration.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+public interface DockerConfiguration {
+	ProcessBuilder configuredDockerComposeProcess();
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutable.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutable.java
new file mode 100644
index 0000000000..484769e49e
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutable.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DockerExecutable implements Executable {
+	private static final Logger log = LoggerFactory.getLogger(DockerExecutable.class);
+
+	private static final DockerCommandLocations DOCKER_LOCATIONS = new DockerCommandLocations(
+			System.getenv("DOCKER_LOCATION"),
+			"/usr/local/bin/docker",
+			"/usr/bin/docker"
+	);
+
+	private DockerConfiguration dockerConfiguration;
+
+	public DockerExecutable(DockerConfiguration dockerConfiguration) {
+		this.dockerConfiguration = dockerConfiguration;
+	}
+
+	public DockerConfiguration dockerConfiguration() {
+		return dockerConfiguration;
+	}
+
+	@Override
+	public final String commandName() {
+		return "docker";
+	}
+
+	protected String dockerPath() {
+		String pathToUse = DOCKER_LOCATIONS.preferredLocation()
+				.orElseThrow(() -> new IllegalStateException(
+						"Could not find docker, looked in: " + DOCKER_LOCATIONS));
+
+		log.debug("Using docker found at " + pathToUse);
+
+		return pathToUse;
+	}
+
+	@Override
+	public Process execute(String... commands) throws IOException {
+		List<String> args = new ArrayList<>();
+		args.add(dockerPath());
+		args.addAll(Arrays.asList(commands));
+
+		return dockerConfiguration().configuredDockerComposeProcess()
+				.command(args)
+				.redirectErrorStream(true)
+				.start();
+	}
+
+	public static DockerExecutable.Builder builder() {
+		return new Builder();
+	}
+
+	public static class Builder {
+
+		private DockerConfiguration dockerConfiguration;
+
+		public Builder dockerConfiguration(DockerConfiguration dockerConfiguration) {
+			this.dockerConfiguration = dockerConfiguration;
+			return this;
+		}
+
+		public DockerExecutable build() {
+			return new DockerExecutable(dockerConfiguration);
+		}
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutionException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutionException.java
new file mode 100644
index 0000000000..b765fb3636
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutionException.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+public class DockerExecutionException extends RuntimeException {
+	public DockerExecutionException() {
+	}
+
+	public DockerExecutionException(String message) {
+		super(message);
+	}
+}
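Editor's note: DockerExecutable and Docker compose naturally; this sketch mirrors the wiring inside Docker.version() earlier in this change:

    Docker docker = new Docker(DockerExecutable.builder()
            .dockerConfiguration(DockerMachine.localMachine().build())
            .build());
    docker.pruneNetworks(); // any non-zero exit surfaces as a DockerExecutionException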
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerForMacHostsIssue.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerForMacHostsIssue.java
new file mode 100644
index 0000000000..1c2fa4a3a7
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerForMacHostsIssue.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.springframework.util.FileCopyUtils;
+
+/**
+ * Check whether Mac OS X users have pointed localunixsocket to localhost.
+ *
+ * <p>
docker-compose takes an order of magnitude longer to run commands without this tip!
+ *
+ * @see <a href="https://github.com/docker/compose/issues/3419#issuecomment-221793401">Docker Compose Issue #3419</a>
+ */
+public class DockerForMacHostsIssue {
+
+	private static final String REDIRECT_LINE = "127.0.0.1 localunixsocket\n";
+	private static final String WARNING_MESSAGE = "\n\n **** WARNING: Your tests may be slow ****\n" +
+			"Please add the following line to /etc/hosts:\n " +
+			REDIRECT_LINE +
+			"\nFor more information, see https://github.com/docker/compose/issues/3419#issuecomment-221793401\n\n";
+	private static volatile boolean checkPerformed = false;
+
+	@SuppressWarnings("checkstyle:BanSystemErr")
+	public static void issueWarning() {
+		if (!checkPerformed) {
+			if (onMacOsX() && !localunixsocketRedirectedInEtcHosts()) {
+				System.err.print(WARNING_MESSAGE);
+			}
+		}
+		checkPerformed = true;
+	}
+
+	private static boolean onMacOsX() {
+		return System.getProperty("os.name", "generic").equals("Mac OS X");
+	}
+
+	private static boolean localunixsocketRedirectedInEtcHosts() {
+		try {
+			byte[] bytes = FileCopyUtils.copyToByteArray(new File("/etc/hosts"));
+			String content = new String(bytes);
+			return content.contains(REDIRECT_LINE);
+		} catch (IOException e) {
+			return true; // Better to be silent than issue false warnings
+		}
+	}
+
+	public static void main(String[] args) {
+		issueWarning();
+	}
+
+	private DockerForMacHostsIssue() {}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ErrorHandler.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ErrorHandler.java
new file mode 100644
index 0000000000..cd5269fe85
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ErrorHandler.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+@FunctionalInterface
+public interface ErrorHandler {
+	void handle(int exitCode, String output, String commandName, String... commands);
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Executable.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Executable.java
new file mode 100644
index 0000000000..27a291ff33
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Executable.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; + +public interface Executable { + + String commandName(); + + Process execute(String... commands) throws IOException; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategy.java new file mode 100644 index 0000000000..739def945c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategy.java @@ -0,0 +1,40 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +/** + * Send SIGTERM to containers first, allowing them up to 10 seconds to + * terminate before killing and rm-ing them. 
+ */ +public class GracefulShutdownStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(GracefulShutdownStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException { + log.debug("Killing docker-compose cluster"); + dockerCompose.down(); + dockerCompose.kill(); + dockerCompose.rm(); + docker.pruneNetworks(); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategy.java new file mode 100644 index 0000000000..3af89427f8 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategy.java @@ -0,0 +1,45 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +/** + * Shuts down fast but cleanly by issuing a kill (fast shutdown) followed by a down (thorough cleanup) + * + *
<p>
"down" would be ideal as a single command if it didn't first execute an impotent SIGTERM, which + * many Docker images simply ignore due to being run by bash as process 1. We don't need a graceful + * shutdown period anyway since the tests are done and we're destroying the docker image. + */ +public class KillDownShutdownStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(KillDownShutdownStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) + throws IOException, InterruptedException { + log.debug("Killing docker-compose cluster"); + dockerCompose.kill(); + log.debug("Downing docker-compose cluster"); + dockerCompose.down(); + log.debug("docker-compose cluster killed"); + docker.pruneNetworks(); + log.debug("Networks pruned"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ProcessResult.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ProcessResult.java new file mode 100644 index 0000000000..15051f5d0c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ProcessResult.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +public class ProcessResult { + private int exitCode; + private final String output; + + public ProcessResult(int exitCode, String output) { + this.exitCode = exitCode; + this.output = output; + } + + public int exitCode() { + return exitCode; + } + + public String output() { + return output; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Retryer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Retryer.java new file mode 100644 index 0000000000..817df8b7b3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Retryer.java @@ -0,0 +1,58 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import org.joda.time.Duration;
+import org.joda.time.ReadableDuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Retryer {
+	private static final Logger log = LoggerFactory.getLogger(Retryer.class);
+	public static final ReadableDuration STANDARD_DELAY = Duration.standardSeconds(5);
+
+	public interface RetryableDockerOperation<T> {
+		T call() throws IOException, InterruptedException;
+	}
+
+	private final int retryAttempts;
+	private final ReadableDuration delay;
+
+	public Retryer(int retryAttempts, ReadableDuration delay) {
+		this.retryAttempts = retryAttempts;
+		this.delay = delay;
+	}
+
+	public <T> T runWithRetries(RetryableDockerOperation<T> operation) throws IOException, InterruptedException {
+		DockerExecutionException lastExecutionException = null;
+		for (int i = 0; i <= retryAttempts; i++) {
+			try {
+				return operation.call();
+			} catch (DockerExecutionException e) {
+				lastExecutionException = e;
+				log.warn("Caught exception: {}", e.getMessage());
+				log.warn("Retrying after {}", delay);
+				if (i < retryAttempts) {
+					Thread.sleep(delay.getMillis());
+				}
+			}
+		}
+
+		log.error("Exhausted all retry attempts. Tried {} times.", retryAttempts);
+		throw lastExecutionException;
+	}
+}
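Retryer retries an operation only when it fails with DockerExecutionException, sleeping between attempts; any other exception propagates immediately. A small sketch of direct usage, with a stub operation standing in for a real docker call:

import org.joda.time.Duration;

import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Retryer;

class RetryerSketch {

	public static void main(String[] args) throws Exception {
		// At most 1 + 2 attempts in total, sleeping two seconds between them.
		Retryer retryer = new Retryer(2, Duration.standardSeconds(2));
		// The result type is inferred from the lambda; only a
		// DockerExecutionException thrown by the operation triggers a retry.
		String output = retryer.runWithRetries(() -> "pretend docker-compose output");
		System.out.println(output);
	}
}

RetryingDockerCompose, defined next, wraps exactly this mechanism around the up and ps operations.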
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerCompose.java
new file mode 100644
index 0000000000..64b0a62cb4
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerCompose.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName;
+
+public class RetryingDockerCompose extends DelegatingDockerCompose {
+	private final Retryer retryer;
+
+	public RetryingDockerCompose(int retryAttempts, DockerCompose dockerCompose) {
+		this(new Retryer(retryAttempts, Retryer.STANDARD_DELAY), dockerCompose);
+	}
+
+	public RetryingDockerCompose(Retryer retryer, DockerCompose dockerCompose) {
+		super(dockerCompose);
+		this.retryer = retryer;
+	}
+
+	@Override
+	public void up() throws IOException, InterruptedException {
+		retryer.runWithRetries(() -> {
+			super.up();
+			return null;
+		});
+	}
+
+	@Override
+	public List<ContainerName> ps() throws IOException, InterruptedException {
+		return retryer.runWithRetries(super::ps);
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/SkipShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/SkipShutdownStrategy.java
new file mode 100644
index 0000000000..d61e34a861
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/SkipShutdownStrategy.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy;
+
+public class SkipShutdownStrategy implements ShutdownStrategy {
+
+	private static final Logger log = LoggerFactory.getLogger(SkipShutdownStrategy.class);
+
+	@Override
+	public void shutdown(DockerCompose dockerCompose, Docker docker) {
+		log.warn("\n" +
+				"******************************************************************************************\n" +
+				"* docker-compose-rule has been configured to skip docker-compose shutdown: *\n" +
+				"* this means the containers will be left running after tests finish executing. *\n" +
+				"* If you see this message when running on CI it means you are potentially abandoning *\n" +
+				"* long running processes and leaking resources. 
*\n" + + "******************************************************************************************"); + } + + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/DoNothingLogCollector.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/DoNothingLogCollector.java new file mode 100644 index 0000000000..24e78e7dd9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/DoNothingLogCollector.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class DoNothingLogCollector implements LogCollector { + + @Override + public void startCollecting(DockerCompose dockerCompose) { + + } + + @Override + public void stopCollecting() { + + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollector.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollector.java new file mode 100644 index 0000000000..03249e88ac --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollector.java @@ -0,0 +1,100 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.logging;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.Files;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import org.apache.commons.lang3.Validate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;
+import org.springframework.util.Assert;
+
+public class FileLogCollector implements LogCollector {
+
+	private static final Logger log = LoggerFactory.getLogger(FileLogCollector.class);
+
+	private static final long STOP_TIMEOUT_IN_MILLIS = 50;
+
+	private final File logDirectory;
+
+	private ExecutorService executor = null;
+
+	public FileLogCollector(File logDirectory) {
+		Assert.state(!logDirectory.isFile(), "Log directory cannot be a file");
+		if (!logDirectory.exists()) {
+			Validate.isTrue(logDirectory.mkdirs(), "Error making log directory: " + logDirectory.getAbsolutePath());
+		}
+		this.logDirectory = logDirectory;
+	}
+
+	public static LogCollector fromPath(String path) {
+		return new FileLogCollector(new File(path));
+	}
+
+	@Override
+	public synchronized void startCollecting(DockerCompose dockerCompose) throws IOException, InterruptedException {
+		if (executor != null) {
+			throw new RuntimeException("Cannot start collecting the same logs twice");
+		}
+
+		List<String> serviceNames = dockerCompose.services();
+		if (serviceNames.size() == 0) {
+			return;
+		}
+		executor = Executors.newFixedThreadPool(serviceNames.size());
+		serviceNames.stream().forEachOrdered(service -> this.collectLogs(service, dockerCompose));
+	}
+
+	private void collectLogs(String container, DockerCompose dockerCompose) {
+		executor.submit(() -> {
+			File outputFile = new File(logDirectory, container + ".log");
+			try {
+				Files.createFile(outputFile.toPath());
+			} catch (final FileAlreadyExistsException e) {
+				// ignore
+			} catch (final IOException e) {
+				throw new RuntimeException("Error creating log file", e);
+			}
+			log.info("Writing logs for container '{}' to '{}'", container, outputFile.getAbsolutePath());
+			try (FileOutputStream outputStream = new FileOutputStream(outputFile)) {
+				dockerCompose.writeLogs(container, outputStream);
+			} catch (IOException e) {
+				throw new RuntimeException("Error reading log", e);
+			}
+		});
+	}
+
+	@Override
+	public synchronized void stopCollecting() throws InterruptedException {
+		if (executor == null) {
+			return;
+		}
+		if (!executor.awaitTermination(STOP_TIMEOUT_IN_MILLIS, TimeUnit.MILLISECONDS)) {
+			log.warn("docker containers were still running when log collection stopped");
+			executor.shutdownNow();
+		}
+		executor = null;
+	}
+
+}
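A sketch of how this collector is intended to be driven around a test run (illustrative only; the DockerCompose instance is assumed to be the cluster under test, and the log path is made up):

import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;
import org.springframework.cloud.dataflow.common.test.docker.compose.logging.FileLogCollector;
import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector;

class LogCaptureSketch {

	static void runWithLogCapture(DockerCompose dockerCompose) throws Exception {
		LogCollector collector = FileLogCollector.fromPath("build/dockerLogs/my-test");
		// One writer thread per compose service; each service's output is
		// streamed to build/dockerLogs/my-test/<service>.log.
		collector.startCollecting(dockerCompose);
		try {
			// ... exercise the containers here ...
		}
		finally {
			// Waits briefly for the writers, then interrupts any still running.
			collector.stopCollecting();
		}
	}
}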
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogCollector.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogCollector.java
new file mode 100644
index 0000000000..c6d5c7b8ba
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogCollector.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.logging;
+
+import java.io.IOException;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;
+
+public interface LogCollector {
+
+	void startCollecting(DockerCompose dockerCompose) throws IOException, InterruptedException;
+
+	void stopCollecting() throws InterruptedException;
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectory.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectory.java
new file mode 100644
index 0000000000..17485a851c
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectory.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.logging;
+
+import java.util.Optional;
+
+public class LogDirectory {
+
+	private LogDirectory() {}
+
+	/**
+	 * For tests running on CircleCI, save logs into $CIRCLE_ARTIFACTS/dockerLogs/<testClassName>.
+	 * This ensures partial logs can be recovered if the build is cancelled or times out, and
+	 * also avoids needless copying.
+	 *
+	 * Otherwise, save logs from local runs to a folder inside $project/build/dockerLogs named
+	 * after the test class.
+	 *
+	 * @param testClass the JUnit test class whose name will appear on the log folder
+	 * @return log directory
+	 */
+	public static String circleAwareLogDirectory(Class<?> testClass) {
+		return circleAwareLogDirectory(testClass.getSimpleName());
+	}
+
+	public static String circleAwareLogDirectory(String logDirectoryName) {
+		String artifactRoot = Optional.ofNullable(System.getenv("CIRCLE_ARTIFACTS")).orElse("build");
+		return artifactRoot + "/dockerLogs/" + logDirectoryName;
+	}
+
+	/**
+	 * Save logs into a new folder, $project/build/dockerLogs/<testClassName>.
+	 *
+	 * @param testClass the JUnit test class whose name will appear on the log folder
+	 * @return log directory
+	 */
+	public static String gradleDockerLogsDirectory(Class<?> testClass) {
+		return "build/dockerLogs/" + testClass.getSimpleName();
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcher.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcher.java
new file mode 100644
index 0000000000..1409ae5d37
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcher.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.matchers;
+
+import java.util.Collection;
+import java.util.stream.Collectors;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeMatcher;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
+
+public class AvailablePortMatcher extends TypeSafeMatcher<Collection<DockerPort>> {
+
+	@Override
+	public void describeTo(Description description) {
+		description.appendText("No ports to be unavailable");
+	}
+
+	@Override
+	protected boolean matchesSafely(Collection<DockerPort> unavailablePorts) {
+		return unavailablePorts.isEmpty();
+	}
+
+	@Override
+	protected void describeMismatchSafely(Collection<DockerPort> unavailablePorts, Description mismatchDescription) {
+		mismatchDescription.appendValueList("These ports were unavailable:\n", "\n", ".", buildClosedPortsErrorMessage(unavailablePorts));
+	}
+
+	private static Collection<String> buildClosedPortsErrorMessage(Collection<DockerPort> unavailablePorts) {
+		return unavailablePorts.stream()
+				.map(port -> "For host with ip address: " + port.getIp() + " external port '" + port.getExternalPort() + "' mapped to internal port '" + port.getInternalPort() + "' was unavailable")
+				.collect(Collectors.toList());
+	}
+
+	public static AvailablePortMatcher areAvailable() {
+		return new AvailablePortMatcher();
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/resources/application.properties b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/resources/application.properties
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/AggressiveShutdownStrategyTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/AggressiveShutdownStrategyTest.java
new file mode 100644
index
0000000000..a50b88835c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/AggressiveShutdownStrategyTest.java @@ -0,0 +1,63 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose; + +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import org.junit.jupiter.api.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException; + +public class AggressiveShutdownStrategyTest { + + private final DockerCompose mockDockerCompose = mock(DockerCompose.class); + private final Docker mockDocker = mock(Docker.class); + + private static final String btrfs_message = "'docker rm -f test-1.container.name test-2.container.name' " + + "returned exit code 1\nThe output was:\nFailed to remove container (test-1.container.name): " + + "Error response from daemon: Driver btrfs failed to remove root filesystem "; + + @Test + public void first_btrfs_error_should_be_caught_silently_and_retried() throws Exception { + doThrow(new DockerExecutionException(btrfs_message)) + .doNothing() + .when(mockDocker) + .rm(anyList()); + + ShutdownStrategy.AGGRESSIVE.shutdown(mockDockerCompose, mockDocker); + + verify(mockDocker, times(2)).rm(anyList()); + } + + @Test + public void after_two_btrfs_failures_we_should_just_log_and_continue() throws Exception { + doThrow(new DockerExecutionException(btrfs_message)) + .doThrow(new DockerExecutionException(btrfs_message)) + .when(mockDocker) + .rm(anyList()); + + ShutdownStrategy.AGGRESSIVE.shutdown(mockDockerCompose, mockDocker); + + verify(mockDocker, times(2)).rm(anyList()); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/AggressiveShutdownWithNetworkCleanupStrategyTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/AggressiveShutdownWithNetworkCleanupStrategyTest.java new file mode 100644 index 0000000000..5ad5037f7c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/AggressiveShutdownWithNetworkCleanupStrategyTest.java @@ -0,0 +1,51 @@ +/* + * Copyright 2018-2019 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose; + +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import org.junit.jupiter.api.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException; + +public class AggressiveShutdownWithNetworkCleanupStrategyTest { + +// @Rule +// public final ExpectedException exception = ExpectedException.none(); + + private final DockerCompose mockDockerCompose = mock(DockerCompose.class); + private final Docker mockDocker = mock(Docker.class); + + private static final String error_msg = "Random DockerExecutionException message"; + + @Test + public void docker_compose_down_should_be_called_despite_docker_rm_throwing_exception() throws Exception { + doThrow(new DockerExecutionException(error_msg)) + .when(mockDocker) + .rm(anyList()); + + ShutdownStrategy.AGGRESSIVE_WITH_NETWORK_CLEANUP.shutdown(mockDockerCompose, mockDocker); + + verify(mockDockerCompose, times(1)).down(); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/TestContainerNames.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/TestContainerNames.java new file mode 100644 index 0000000000..2bb20047a9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/TestContainerNames.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose; + +import static java.util.stream.Collectors.toList; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; + +public class TestContainerNames { + + private TestContainerNames() {} + + public static List of(String... semanticNames) { + return Arrays.stream(semanticNames) + .map(TestContainerNames::testContainerName) + .collect(toList()); + } + + private static ContainerName testContainerName(String testName) { + return ContainerName.builder() + .semanticName(testName) + .rawName("123456_" + testName + "_1") + .build(); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidatorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidatorTests.java new file mode 100644 index 0000000000..63905f6cb5 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidatorTests.java @@ -0,0 +1,52 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.AdditionalEnvironmentValidator;
+
+public class AdditionalEnvironmentValidatorTests {
+
+	@Rule
+	public ExpectedException exception = ExpectedException.none();
+
+	@Test
+	public void throw_exception_when_additional_environment_variables_contain_docker_variables() {
+		Map<String, String> variables = new HashMap<>();
+		variables.put("DOCKER_HOST", "tcp://some-host:2376");
+		variables.put("SOME_VARIABLE", "Some Value");
+		exception.expect(IllegalStateException.class);
+		exception.expectMessage("The following variables");
+		exception.expectMessage("DOCKER_HOST");
+		exception.expectMessage("cannot exist in your additional environment");
+		AdditionalEnvironmentValidator.validate(variables);
+	}
+
+	@Test
+	public void validate_arbitrary_environment_variables() {
+		Map<String, String> variables = new HashMap<>();
+		variables.put("SOME_VARIABLE", "Some Value");
+
+		assertThat(AdditionalEnvironmentValidator.validate(variables), is(variables));
+	}
+}
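The contract these tests pin down: validate(...) hands the map back when it contains no DOCKER_* variables and throws IllegalStateException otherwise. A minimal sketch of the happy path, assuming the Map-returning signature the tests above exercise:

import java.util.HashMap;
import java.util.Map;

import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.AdditionalEnvironmentValidator;

class AdditionalEnvironmentSketch {

	public static void main(String[] args) {
		Map<String, String> extraEnv = new HashMap<>();
		extraEnv.put("SOME_VARIABLE", "Some Value");
		// No DOCKER_* keys present, so the same map comes straight back.
		Map<String, String> validated = AdditionalEnvironmentValidator.validate(extraEnv);
		System.out.println(validated);
	}
}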
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidatorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidatorTests.java
new file mode 100644
index 0000000000..16ecf3a3d9
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidatorTests.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY;
+
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonEnvironmentValidator;
+
+public class DaemonEnvironmentValidatorTests {
+
+	@Rule
+	public ExpectedException exception = ExpectedException.none();
+
+	@Test
+	public void validate_successfully_when_docker_environment_does_not_contain_docker_variables() {
+		Map<String, String> variables = new HashMap<>();
+		variables.put("SOME_VARIABLE", "SOME_VALUE");
+		variables.put("ANOTHER_VARIABLE", "ANOTHER_VALUE");
+
+		DaemonEnvironmentValidator.instance().validateEnvironmentVariables(variables);
+	}
+
+	@Test
+	public void throw_exception_when_docker_environment_contains_illegal_docker_variables() {
+		Map<String, String> variables = new HashMap<>();
+		variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+		variables.put(DOCKER_TLS_VERIFY, "1");
+		variables.put(DOCKER_CERT_PATH, "/path/to/certs");
+
+		exception.expect(IllegalStateException.class);
+		exception.expectMessage("These variables were set:");
+		exception.expectMessage(DOCKER_HOST);
+		exception.expectMessage(DOCKER_CERT_PATH);
+		exception.expectMessage(DOCKER_TLS_VERIFY);
+		exception.expectMessage("They cannot be set when connecting to a local docker daemon");
+		DaemonEnvironmentValidator.instance().validateEnvironmentVariables(variables);
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolverTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolverTests.java
new file mode 100644
index 0000000000..157ae4bcfb
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolverTests.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonHostIpResolver.LOCALHOST; + +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonHostIpResolver; + +public class DaemonHostIpResolverTests { + + @Test + public void return_local_host_with_null() { + assertThat(new DaemonHostIpResolver().resolveIp(null), is(LOCALHOST)); + } + + @Test + public void return_local_host_with_blank() { + assertThat(new DaemonHostIpResolver().resolveIp(""), is(LOCALHOST)); + } + + @Test + public void return_local_host_with_arbitrary() { + assertThat(new DaemonHostIpResolver().resolveIp("arbitrary"), is(LOCALHOST)); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFilesTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFilesTests.java new file mode 100644 index 0000000000..da8a22bbd9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFilesTests.java @@ -0,0 +1,99 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + +import java.io.File; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; + +public class DockerComposeFilesTests { + + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + + @Rule + public final ExpectedException exception = ExpectedException.none(); + + @Test + public void throw_exception_when_compose_file_is_not_specified() { + exception.expect(IllegalStateException.class); + exception.expectMessage("A docker compose file must be specified."); + DockerComposeFiles.from(); + } + + @Test + public void throw_exception_when_compose_file_does_not_exist() { + exception.expect(IllegalStateException.class); + exception.expectMessage("The following docker-compose files:"); + exception.expectMessage("does-not-exist.yaml"); + exception.expectMessage("do not exist."); + DockerComposeFiles.from("does-not-exist.yaml"); + } + + @Test + public void + throw_correct_exception_when_there_is_a_single_missing_compose_file_with_an_existing_compose_file() + throws Exception { + exception.expect(IllegalStateException.class); + exception.expectMessage("The following docker-compose files:"); + exception.expectMessage("does-not-exist.yaml"); + exception.expectMessage("do not exist."); + exception.expectMessage(not(containsString("docker-compose.yaml"))); + + File composeFile = tempFolder.newFile("docker-compose.yaml"); + DockerComposeFiles.from("does-not-exist.yaml", composeFile.getAbsolutePath()); + } + + @Test + public void generate_docker_compose_file_command_correctly_for_single_compose_file() throws Exception { + File composeFile = tempFolder.newFile("docker-compose.yaml"); + DockerComposeFiles dockerComposeFiles = DockerComposeFiles.from(composeFile.getAbsolutePath()); + assertThat(dockerComposeFiles.constructComposeFileCommand(), contains("--file", composeFile.getAbsolutePath())); + } + + @Test + public void generate_docker_compose_file_command_correctly_for_multiple_compose_files() throws Exception { + File composeFile1 = tempFolder.newFile("docker-compose1.yaml"); + File composeFile2 = tempFolder.newFile("docker-compose2.yaml"); + DockerComposeFiles dockerComposeFiles = DockerComposeFiles.from(composeFile1.getAbsolutePath(), composeFile2.getAbsolutePath()); + assertThat(dockerComposeFiles.constructComposeFileCommand(), contains( + "--file", composeFile1.getAbsolutePath(), "--file", composeFile2.getAbsolutePath())); + } + + @Test + public void testFromClasspathExist() { + DockerComposeFiles dockerComposeFiles = DockerComposeFiles.from("classpath:docker-compose-cp1.yaml", + "classpath:org/springframework/cloud/dataflow/common/test/docker/compose/docker-compose-cp2.yaml"); + assertThat(dockerComposeFiles.constructComposeFileCommand(), contains(is("--file"), + containsString("docker-compose-cp1.yaml"), is("--file"), containsString("docker-compose-cp2.yaml"))); + } + + @Test + public void testFromClasspathDoesNotExist() { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("Can't find resource classpath:does-not-exist.yaml"); + 
DockerComposeFiles.from("classpath:does-not-exist.yaml");
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerTypeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerTypeTests.java
new file mode 100644
index 0000000000..83c25fb9cb
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerTypeTests.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import org.junit.Test;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType;
+
+public class DockerTypeTests {
+
+	@Test
+	public void return_remote_as_first_valid_type_if_environment_is_illegal_for_daemon() {
+
+		Map<String, String> variables = new HashMap<>();
+		variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+		variables.put(DOCKER_TLS_VERIFY, "1");
+		variables.put(DOCKER_CERT_PATH, "/path/to/certs");
+		assertThat(DockerType.getFirstValidDockerTypeForEnvironment(variables), is(Optional.of(DockerType.REMOTE)));
+	}
+
+	@Test
+	public void return_daemon_as_first_valid_type_if_environment_is_illegal_for_remote() {
+		Map<String, String> variables = new HashMap<>();
+		assertThat(DockerType.getFirstValidDockerTypeForEnvironment(variables), is(Optional.of(DockerType.DAEMON)));
+	}
+
+	@Test
+	public void return_absent_as_first_valid_type_if_environment_is_illegal_for_all() {
+		Map<String, String> variables = new HashMap<>();
+		variables.put(DOCKER_TLS_VERIFY, "1");
+		assertThat(DockerType.getFirstValidDockerTypeForEnvironment(variables), is(Optional.empty()));
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/MockDockerEnvironment.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/MockDockerEnvironment.java
new file mode 100644
index 0000000000..3ce6ee1098
--- /dev/null
+++
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/MockDockerEnvironment.java @@ -0,0 +1,94 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class MockDockerEnvironment { + + private final DockerCompose dockerComposeProcess; + + public MockDockerEnvironment(DockerCompose dockerComposeProcess) { + this.dockerComposeProcess = dockerComposeProcess; + } + + public DockerPort availableService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = port(service, ip, externalPortNumber, internalPortNumber); + doReturn(true).when(port).isListeningNow(); + return port; + } + + public DockerPort unavailableService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = port(service, ip, externalPortNumber, internalPortNumber); + doReturn(false).when(port).isListeningNow(); + return port; + } + + public DockerPort availableHttpService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = availableService(service, ip, externalPortNumber, internalPortNumber); + doReturn(true).when(port).isHttpResponding(any(), eq(false)); + doReturn(SuccessOrFailure.success()).when(port).isHttpRespondingSuccessfully(any(), eq(false)); + return port; + } + + public DockerPort unavailableHttpService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = availableService(service, ip, externalPortNumber, internalPortNumber); + doReturn(false).when(port).isHttpResponding(any(), eq(false)); + return port; + } + + public DockerPort port(String service, String ip, int externalPortNumber, int internalPortNumber) throws IOException, InterruptedException { + DockerPort port = dockerPortSpy(ip, externalPortNumber, internalPortNumber); + when(dockerComposeProcess.ports(service)).thenReturn(new Ports(port)); + return port; + } + + public void 
ephemeralPort(String service, String ip, int internalPortNumber) throws IOException, InterruptedException { + AtomicInteger currentExternalPort = new AtomicInteger(33700); + when(dockerComposeProcess.ports(service)).then(a -> { + DockerPort port = dockerPortSpy(ip, currentExternalPort.incrementAndGet(), internalPortNumber); + return new Ports(port); + }); + } + + public void ports(String service, String ip, Integer... portNumbers) throws IOException, InterruptedException { + List<DockerPort> ports = Arrays.asList(portNumbers) + .stream() + .map(portNumber -> dockerPortSpy(ip, portNumber, portNumber)) + .collect(Collectors.toList()); + when(dockerComposeProcess.ports(service)).thenReturn(new Ports(ports)); + } + + private static DockerPort dockerPortSpy(String ip, int externalPortNumber, int internalPortNumber) { + DockerPort port = new DockerPort(ip, externalPortNumber, internalPortNumber); + return spy(port); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectNameTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectNameTests.java new file mode 100644 index 0000000000..bfe29c2b1d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectNameTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.List; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; + +public class ProjectNameTests { + + @Rule + public final ExpectedException exception = ExpectedException.none(); + + @Test + public void use_project_name_prefix_in_construct_compose_command() { + List<String> command = ProjectName.random().constructComposeFileCommand(); + + assertThat(command, hasSize(2)); + assertThat(command.get(0), is("--project-name")); + } + + @Test + public void produce_different_names_on_successive_calls_to_random() { + List<String> firstCommand = ProjectName.random().constructComposeFileCommand(); + List<String> secondCommand = ProjectName.random().constructComposeFileCommand(); + + assertThat(firstCommand, is(not(equalTo(secondCommand)))); + } + + @Test + public void have_eight_characters_long_random() { + String randomName = ProjectName.random().constructComposeFileCommand().get(1); + assertThat(randomName.length(), is(8)); + } + + @Test + public void should_pass_name_to_command_in_from_string_factory() { + List<String> command = ProjectName.fromString("projectname").constructComposeFileCommand(); + assertThat(command, contains("--project-name", "projectname")); + } + + @Test + public void reject_blanks_in_from_string() { + exception.expect(IllegalStateException.class); + exception.expectMessage("ProjectName must not be blank."); + ProjectName.fromString(" "); + } + + @Test + public void match_validation_behavior_of_docker_compose_cli() { + exception.expect(IllegalStateException.class); + exception.expectMessage("ProjectName 'Crazy#Proj ect!Name' not allowed, please use lowercase letters and numbers only."); + ProjectName.fromString("Crazy#Proj ect!Name"); + } + + @Test + public void should_return_the_project_name_when_asString_called() { + String projectName = ProjectName.fromString("projectname").asString(); + assertThat(projectName, is("projectname")); + } +} + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidatorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidatorTests.java new file mode 100644 index 0000000000..b924e33176 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidatorTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteEnvironmentValidator; + +public class RemoteEnvironmentValidatorTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void throw_exception_if_docker_host_is_not_set() { + Map<String, String> variables = new HashMap<>(); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables: "); + exception.expectMessage(DOCKER_HOST); + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void throw_exception_if_docker_cert_path_is_missing_and_tls_is_on() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put(DOCKER_TLS_VERIFY, "1"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables: "); + exception.expectMessage(DOCKER_CERT_PATH); + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void validate_environment_with_all_valid_variables_set_without_tls() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void validate_environment_with_all_valid_variables_set_with_tls() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put(DOCKER_TLS_VERIFY, "1"); + variables.put(DOCKER_CERT_PATH, "/path/to/certs"); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolverTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolverTests.java new file mode 100644 index 0000000000..fb499934dc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolverTests.java @@
-0,0 +1,60 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.TCP_PROTOCOL; + +import org.hamcrest.Matchers; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteHostIpResolver; + +public class RemoteHostIpResolverTests { + + private static final String IP = "192.168.99.100"; + private static final int PORT = 2376; + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void result_in_error_when_resolving_blank_docker_host() { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("DOCKER_HOST cannot be blank/null"); + new RemoteHostIpResolver().resolveIp(""); + } + + @Test + public void result_in_error_when_resolving_null_docker_host() { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("DOCKER_HOST cannot be blank/null"); + new RemoteHostIpResolver().resolveIp(null); + } + + @Test + public void resolve_docker_host_with_port() { + String dockerHost = String.format("%s%s:%d", TCP_PROTOCOL, IP, PORT); + assertThat(new RemoteHostIpResolver().resolveIp(dockerHost), Matchers.is(IP)); + } + + @Test + public void resolve_docker_host_without_port() { + String dockerHost = String.format("%s%s", TCP_PROTOCOL, IP); + assertThat(new RemoteHostIpResolver().resolveIp(dockerHost), Matchers.is(IP)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCacheTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCacheTests.java new file mode 100644 index 0000000000..fe525ae3a8 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCacheTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; + +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerCache; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class ContainerCacheTests { + + private static final String CONTAINER_NAME = "container"; + + private final Docker docker = mock(Docker.class); + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final ContainerCache containers = new ContainerCache(docker, dockerCompose); + + @Test + public void return_a_container_with_the_specified_name_when_getting_a_new_container() { + Container container = containers.container(CONTAINER_NAME); + assertThat(container, is(new Container(CONTAINER_NAME, docker, dockerCompose))); + } + + @Test + public void return_the_same_object_when_getting_a_container_twice() { + Container container = containers.container(CONTAINER_NAME); + Container sameContainer = containers.container(CONTAINER_NAME); + assertThat(container, is(sameInstance(sameContainer))); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerIntegrationTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerIntegrationTests.java new file mode 100644 index 0000000000..9eb4ccdec9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerIntegrationTests.java @@ -0,0 +1,94 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static com.jayway.awaitility.Awaitility.await; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assume.assumeThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument.arguments; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.noOptions; + +import com.github.zafarkhaja.semver.Version; +import com.jayway.awaitility.core.ConditionFactory; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.junit.Test; +import org.mockito.internal.matchers.GreaterOrEqual; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.State; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DefaultDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutable; + +public class ContainerIntegrationTests { + + private static final ConditionFactory wait = await().atMost(10, TimeUnit.SECONDS); + + private final DockerMachine dockerMachine = DockerMachine.localMachine().build(); + private final Docker docker = new Docker(DockerExecutable.builder() + .dockerConfiguration(dockerMachine) + .build()); + + @Test + public void testStateChanges_withoutHealthCheck() throws IOException, InterruptedException { + DockerCompose dockerCompose = new DefaultDockerCompose( + DockerComposeFiles.from("src/test/resources/no-healthcheck.yaml"), + dockerMachine, + ProjectName.random()); + + // The noHealthcheck service has no healthcheck specified; it should be immediately healthy + Container container = new Container("noHealthcheck", docker, dockerCompose); + assertEquals(State.DOWN, container.state()); + container.up(); + assertEquals(State.HEALTHY, container.state()); + container.kill(); + assertEquals(State.DOWN, container.state()); + } + + /** + * This test is not currently enabled in Circle, as Circle does not provide a sufficiently recent version of docker-compose.
+ * + * @see Issue #156 + */ + @Test + public void testStateChanges_withHealthCheck() throws IOException, InterruptedException { +// assumeThat("docker version", Docker.version(), new GreaterOrEqual<>(Version.forIntegers(1, 12, 0))); +// assumeThat("docker-compose version", DockerCompose.version(), new GreaterOrEqual<>(Version.forIntegers(1, 10, 0))); + + DockerCompose dockerCompose = new DefaultDockerCompose( + DockerComposeFiles.from("src/test/resources/native-healthcheck.yaml"), + dockerMachine, + ProjectName.random()); + + // The withHealthcheck service's healthcheck checks every 100ms whether the file "healthy" exists + Container container = new Container("withHealthcheck", docker, dockerCompose); + assertEquals(State.DOWN, container.state()); + container.up(); + assertEquals(State.UNHEALTHY, container.state()); + dockerCompose.exec(noOptions(), "withHealthcheck", arguments("touch", "healthy")); + wait.until(container::state, equalTo(State.HEALTHY)); + dockerCompose.exec(noOptions(), "withHealthcheck", arguments("rm", "healthy")); + wait.until(container::state, equalTo(State.UNHEALTHY)); + container.kill(); + assertEquals(State.DOWN, container.state()); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNameTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNameTests.java new file mode 100644 index 0000000000..615b22dcac --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNameTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static java.util.Collections.emptyList; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.core.Is.is; + +import java.util.List; +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerNames; + +public class ContainerNameTests { + + @Test + public void parse_a_semantic_and_raw_name_correctly_from_a_single_line() { + ContainerName actual = ContainerName.fromPsLine("dir_db_1 other line contents"); + + ContainerName expected = ContainerName.builder() + .rawName("dir_db_1") + .semanticName("db") + .build(); + + assertThat(actual, is(expected)); + } + + @Test + public void can_handle_custom_container_names() { + ContainerName name = ContainerName.fromPsLine("test-1.container.name /docker-entrypoint.sh postgres Up 5432/tcp"); + + ContainerName expected = ContainerName.builder() + .rawName("test-1.container.name") + .semanticName("test-1.container.name") + .build(); + + assertThat(name, is(expected)); + } + + @Test + public void result_in_no_container_names_when_ps_output_is_empty() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\n"); + assertThat(names, is(emptyList())); + } + + @Test + public void result_in_a_single_container_name_when_ps_output_has_a_single_container() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents"); + assertThat(names, contains(containerName("dir", "db", "1"))); + } + + @Test + public void allow_windows_newline_characters() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\r\n----\r\ndir_db_1 other line contents"); + assertThat(names, contains(containerName("dir", "db", "1"))); + } + + @Test + public void allow_containers_with_underscores_in_their_name() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_left_right_1 other line contents"); + assertThat(names, contains(containerName("dir", "left_right", "1"))); + } + + @Test + public void result_in_two_container_names_when_ps_output_has_two_containers() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents\ndir_db2_1 other stuff"); + assertThat(names, contains(containerName("dir", "db", "1"), containerName("dir", "db2", "1"))); + } + + @Test + public void ignore_an_empty_line_in_ps_output() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents\n\n"); + assertThat(names, contains(containerName("dir", "db", "1"))); + } + + @Test + public void ignore_a_line_with_only_spaces_in_ps_output() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents\n \n"); + assertThat(names, contains(containerName("dir", "db", "1"))); + } + + private static ContainerName containerName(String project, String semantic, String number) { + return ContainerName.builder() + .rawName(project + "_" + semantic + "_" + number) + .semanticName(semantic) + .build(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerTests.java
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerTests.java new file mode 100644 index 0000000000..4bf4bb92cf --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failureWithMessage; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful; + +import java.io.IOException; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.MockDockerEnvironment; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class ContainerTests { + + private static final String IP = "127.0.0.1"; + + @Rule + public ExpectedException exception = ExpectedException.none(); + + private final Docker docker = mock(Docker.class); + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final MockDockerEnvironment env = new MockDockerEnvironment(dockerCompose); + private final Container container = new Container("service", docker, dockerCompose); + + @Test + public void return_port_for_container_when_external_port_number_given() throws Exception { + DockerPort expected = env.availableService("service", IP, 5433, 5432); + DockerPort port = container.portMappedExternallyTo(5433); + assertThat(port, is(expected)); + } + + @Test + public void return_port_for_container_when_internal_port_number_given() throws Exception { + DockerPort expected = env.availableService("service", IP, 5433, 5432); + DockerPort port = container.port(5432); + assertThat(port, is(expected)); + } + + @Test + public void call_docker_ports_once_when_two_ports_are_requested() throws Exception { + env.ports("service", IP, 8080, 8081); + container.port(8080); + container.port(8081); + verify(dockerCompose, times(1)).ports("service"); + } + + @Test + public void return_updated_external_port_on_restart() throws IOException, InterruptedException { + int internalPort = 5432; + env.ephemeralPort("service", IP, internalPort); + + DockerPort port = container.port(internalPort); + int prePort = port.getExternalPort(); + + DockerPort samePort = container.port(internalPort); + assertThat(prePort, is(samePort.getExternalPort())); + + container.stop(); + container.start(); + + DockerPort updatedPort = container.port(internalPort); + assertThat(prePort, not(is(updatedPort.getExternalPort()))); + } + + @Test + public void throw_illegal_argument_exception_when_a_port_for_an_unknown_external_port_is_requested() + throws Exception { + // Service must have ports, otherwise we end up with an exception telling you the service isn't listening at all + env.availableService("service", IP, 5400, 5400); + exception.expect(IllegalArgumentException.class); + exception.expectMessage("No port mapped externally to '5432' for container 'service'"); + container.portMappedExternallyTo(5432); + } + + @Test + public void throw_illegal_argument_exception_when_a_port_for_an_unknown_internal_port_is_requested() + throws Exception { + env.availableService("service", IP, 5400, 5400); + exception.expect(IllegalArgumentException.class); + exception.expectMessage("No internal port '5432' for container 'service'"); + container.port(5432); + } + + @Test + public void have_all_ports_open_if_all_exposed_ports_are_open() throws Exception { + env.availableHttpService("service", IP, 1234, 1234); + + assertThat(container.areAllPortsOpen(), is(successful())); + } + + @Test + public void not_have_all_ports_open_if_has_at_least_one_closed_port_and_report_the_name_of_the_port() throws Exception { + int unavailablePort = 4321; + String unavailablePortString = Integer.toString(unavailablePort); + + env.availableService("service", IP, 1234, 1234); + env.unavailableService("service", IP, unavailablePort, unavailablePort); + + assertThat(container.areAllPortsOpen(), is(failureWithMessage(containsString(unavailablePortString)))); + } + + @Test + public void be_listening_on_http_when_the_port_is() throws Exception { + env.availableHttpService("service", IP, 1234, 2345); + + assertThat( + container.portIsListeningOnHttp(2345, port -> "http://some.url:" + port), + is(successful())); + } + + @Test + public void not_be_listening_on_http_when_the_port_is_not_and_reports_the_port_number_and_url() throws Exception { + int unavailablePort = 1234; + String unavailablePortString = Integer.toString(unavailablePort); + + env.unavailableHttpService("service", IP, unavailablePort, unavailablePort); + + assertThat( + container.portIsListeningOnHttp(unavailablePort, port -> "http://some.url:" + port.getInternalPort()), + is(failureWithMessage(both( + containsString(unavailablePortString)).and( + containsString("http://some.url:" + unavailablePortString) + )))); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPortFormattingTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPortFormattingTests.java new file mode 100644 index 0000000000..11ef9f8ced --- /dev/null +++
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPortFormattingTests.java @@ -0,0 +1,55 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; + +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; + +public class DockerPortFormattingTests { + private final DockerPort dockerPort = new DockerPort("hostname", 1234, 4321); + + @Test public void + have_no_effect_on_a_string_with_no_substitutions() { + assertThat( + dockerPort.inFormat("no substitutions"), + is("no substitutions")); + } + + @Test public void + allow_building_an_externally_accessible_address() { + assertThat( + dockerPort.inFormat("http://$HOST:$EXTERNAL_PORT/api"), + is("http://hostname:1234/api")); + } + + @Test public void + allow_building_an_address_with_an_internal_port() { + assertThat( + dockerPort.inFormat("http://localhost:$INTERNAL_PORT/api"), + is("http://localhost:4321/api")); + } + + @Test public void + allow_multiple_copies_of_each_substitution() { + assertThat( + dockerPort.inFormat("$HOST,$HOST,$INTERNAL_PORT,$INTERNAL_PORT,$EXTERNAL_PORT,$EXTERNAL_PORT"), + is("hostname,hostname,4321,4321,1234,1234")); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/LocalBuilderTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/LocalBuilderTests.java new file mode 100644 index 0000000000..c963dc7bb6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/LocalBuilderTests.java @@ -0,0 +1,230 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.core.Is.is; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonHostIpResolver.LOCALHOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType.DAEMON; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType.REMOTE; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.DockerMachineEnvironmentMatcher.containsEnvironment; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine.LocalBuilder; + +public class LocalBuilderTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void override_previous_environment_when_additional_environment_set_twice_daemon() { + Map<String, String> environment1 = new HashMap<>(); + environment1.put("ENV_1", "VAL_1"); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).withEnvironment(environment1) + .withEnvironment(environment2) + .build(); + assertThat(localMachine, not(containsEnvironment(environment1))); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void be_union_of_additional_environment_and_individual_environment_when_both_set_daemon() { + Map<String, String> environment = new HashMap<>(); + environment.put("ENV_1", "VAL_1"); + environment.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).withEnvironment(environment) + .withAdditionalEnvironmentVariable("ENV_3", "VAL_3") + .build(); + assertThat(localMachine, containsEnvironment(environment)); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_3", "VAL_3"); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void override_previous_environment_with_additional_environment_set_twice_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + Map<String, String> environment1 = new HashMap<>(); + environment1.put("ENV_1", "VAL_1"); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).withEnvironment(environment1) + .withEnvironment(environment2) + .build(); + assertThat(localMachine, not(containsEnvironment(environment1))); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void be_union_of_additional_environment_and_individual_environment_when_both_set_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + Map<String, String> environment = new HashMap<>(); + environment.put("ENV_1", "VAL_1"); + environment.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).withEnvironment(environment) + .withAdditionalEnvironmentVariable("ENV_3", "VAL_3") + .build(); + assertThat(localMachine, containsEnvironment(environment)); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_3", "VAL_3"); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void get_variable_overridden_with_additional_environment() { + Map<String, String> environment = new HashMap<>(); + environment.put("ENV_1", "VAL_1"); + environment.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).withEnvironment(environment) + .withAdditionalEnvironmentVariable("ENV_2", "DIFFERENT_VALUE") + .build(); + + Map<String, String> expected = new HashMap<>(); + expected.put("ENV_1", "VAL_1"); + expected.put("ENV_2", "DIFFERENT_VALUE"); + assertThat(localMachine, not(containsEnvironment(environment))); + assertThat(localMachine, containsEnvironment(expected)); + } + + @Test + public void override_system_environment_with_additional_environment() { + Map<String, String> systemEnv = new HashMap<>(); + systemEnv.put("ENV_1", "VAL_1"); + Map<String, String> overrideEnv = new HashMap<>(); + overrideEnv.put("ENV_1", "DIFFERENT_VALUE"); + DockerMachine localMachine = new LocalBuilder(DAEMON, systemEnv) + .withEnvironment(overrideEnv) + .build(); + + assertThat(localMachine, not(containsEnvironment(systemEnv))); + assertThat(localMachine, containsEnvironment(overrideEnv)); + } + + @Test + public void have_invalid_variables_daemon() { + Map<String, String> invalidDockerVariables = new HashMap<>(); + invalidDockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + invalidDockerVariables.put(DOCKER_TLS_VERIFY, "1"); + invalidDockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("These variables were set"); + exception.expectMessage(DOCKER_HOST); + exception.expectMessage(DOCKER_CERT_PATH); + exception.expectMessage(DOCKER_TLS_VERIFY); + exception.expectMessage("They cannot be set when connecting to a local docker daemon"); + + new LocalBuilder(DAEMON, invalidDockerVariables).build(); + } + + @Test + public void have_invalid_additional_variables_daemon() { + exception.expect(IllegalStateException.class); + exception.expectMessage("The following variables"); + exception.expectMessage(DOCKER_HOST); + exception.expectMessage("cannot exist in your additional environment variable block"); + + new LocalBuilder(DAEMON, new HashMap<>()).withAdditionalEnvironmentVariable(DOCKER_HOST, "tcp://192.168.99.100:2376") + .build(); + } + + @Test + public void have_invalid_additional_variables_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + dockerVariables.put(DOCKER_TLS_VERIFY, "1"); + dockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("The following variables"); + exception.expectMessage(DOCKER_HOST); + exception.expectMessage("cannot exist in your additional environment variable block"); + + new LocalBuilder(REMOTE, dockerVariables).withAdditionalEnvironmentVariable(DOCKER_HOST, "tcp://192.168.99.101:2376") + .build(); + } + + @Test + public void return_localhost_as_ip_daemon() { + DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).build(); + assertThat(localMachine.getIp(), is(LOCALHOST)); + } + + @Test + public void return_docker_host_as_ip_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + dockerVariables.put(DOCKER_TLS_VERIFY, "1"); + dockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs"); + + DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).build(); + assertThat(localMachine.getIp(), is("192.168.99.100")); + } + + @Test + public void have_missing_docker_host_remote() { + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables: "); + exception.expectMessage(DOCKER_HOST); + new LocalBuilder(REMOTE, new HashMap<>()).build(); + } + + @Test + public void build_without_tls_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + + DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).build(); + assertThat(localMachine, containsEnvironment(dockerVariables)); + } + + @Test + public void have_missing_cert_path_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + dockerVariables.put(DOCKER_TLS_VERIFY, "1"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables: "); + exception.expectMessage(DOCKER_CERT_PATH); + new LocalBuilder(REMOTE, dockerVariables).build(); + } + + @Test + public void build_with_tls_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + dockerVariables.put(DOCKER_TLS_VERIFY, "1"); + dockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs"); + + DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).build(); + assertThat(localMachine, containsEnvironment(dockerVariables)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortsTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortsTests.java new file mode 100644 index 0000000000..529b6d58ed --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortsTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static java.util.Collections.emptyList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.Arrays; + +import org.junit.jupiter.api.Test; + +public class PortsTests { + + private static final String LOCALHOST_IP = "127.0.0.1"; + + @Test + public void result_in_no_ports_when_there_are_no_ports_in_ps_output() { + String psOutput = "------"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, null); + Ports expected = new Ports(emptyList()); + assertThat(ports, is(expected)); + } + + @Test + public void result_in_single_port_when_there_is_single_tcp_port_mapping() { + String psOutput = "0.0.0.0:5432->5432/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort(LOCALHOST_IP, 5432, 5432))); + assertThat(ports, is(expected)); + } + + @Test + public void + result_in_single_port_with_ip_other_than_localhost_when_there_is_single_tcp_port_mapping() { + String psOutput = "10.0.1.2:1234->2345/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort("10.0.1.2", 1234, 2345))); + assertThat(ports, is(expected)); + } + + @Test + public void result_in_two_ports_when_there_are_two_tcp_port_mappings() { + String psOutput = "0.0.0.0:5432->5432/tcp, 0.0.0.0:5433->5432/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort(LOCALHOST_IP, 5432, 5432), + new DockerPort(LOCALHOST_IP, 5433, 5432))); + assertThat(ports, is(expected)); + } + + @Test + public void result_in_no_ports_when_there_is_a_non_mapped_exposed_port() { + String psOutput = "5432/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(emptyList()); + assertThat(ports, is(expected)); + } + + @Test + public void parse_actual_docker_compose_output() { + String psOutput = + " Name Command State Ports \n" + + "-------------------------------------------------------------------------------------------------------------------------------------------------\n" + + "postgres_postgres_1 /bin/sh -c /usr/local/bin/ ... 
Up 0.0.0.0:8880->8880/tcp, 8881/tcp, 8882/tcp, 8883/tcp, 8884/tcp, 8885/tcp, 8886/tcp \n" + + ""; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort(LOCALHOST_IP, 8880, 8880))); + assertThat(ports, is(expected)); + } + + @Test + public void throw_illegal_state_exception_when_no_running_container_found_for_service() { + IllegalStateException thrown = assertThrows(IllegalStateException.class, + () -> Ports.parseFromDockerComposePs("", ""), + "Expected Ports.parseFromDockerComposePs to throw, but it didn't"); + assertThat(thrown.getMessage()).contains("No container found"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/RemoteBuilderTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/RemoteBuilderTests.java new file mode 100644 index 0000000000..7c3fa70310 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/RemoteBuilderTests.java @@ -0,0 +1,92 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsMapContaining.hasEntry; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; + +public class RemoteBuilderTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void throw_exception_when_building_a_docker_machine_without_a_host() { + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables"); + exception.expectMessage("DOCKER_HOST"); + DockerMachine.remoteMachine() + .withoutTLS() + .build(); + } + + @Test + public void have_no_tls_environment_variables_when_a_docker_machine_is_built_without_tls() { + DockerMachine dockerMachine = DockerMachine.remoteMachine() + .host("tcp://192.168.99.100") + .withoutTLS() + .build(); + + Map<String, String> expected = new HashMap<>(); + expected.put(DOCKER_HOST, "tcp://192.168.99.100"); + + validateEnvironmentConfiguredDirectly(dockerMachine, expected); + } + + @Test + public void have_tls_environment_variables_set_when_a_docker_machine_is_built_with_tls() { + DockerMachine dockerMachine = DockerMachine.remoteMachine() + .host("tcp://192.168.99.100") + .withTLS("/path/to/certs") + .build(); + + Map<String, String> expected = new HashMap<>(); + expected.put(DOCKER_HOST, "tcp://192.168.99.100"); + expected.put(DOCKER_CERT_PATH, "/path/to/certs"); + validateEnvironmentConfiguredDirectly(dockerMachine, expected); + } + + @Test + public void build_a_docker_machine_with_additional_environment_variables() { + DockerMachine dockerMachine = DockerMachine.remoteMachine() + .host("tcp://192.168.99.100") + .withoutTLS() + .withAdditionalEnvironmentVariable("SOME_VARIABLE", "SOME_VALUE") + .build(); + + Map<String, String> expected = new HashMap<>(); + expected.put(DOCKER_HOST, "tcp://192.168.99.100"); + expected.put("SOME_VARIABLE", "SOME_VALUE"); + validateEnvironmentConfiguredDirectly(dockerMachine, expected); + } + + private static void validateEnvironmentConfiguredDirectly(DockerMachine dockerMachine, Map<String, String> expectedEnvironment) { + ProcessBuilder process = dockerMachine.configuredDockerComposeProcess(); + + Map<String, String> environment = process.environment(); + expectedEnvironment.forEach((var, val) -> assertThat(environment, hasEntry(var, val))); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWaitTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWaitTests.java new file mode 100644 index 0000000000..614385a748 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWaitTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2018-2019 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure.failure; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure.success; + +import org.joda.time.Duration; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerCache; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait; + +public class ClusterWaitTests { + + private static final Duration DURATION = Duration.standardSeconds(1); + private static final String IP = "192.168.100.100"; + + private final ContainerCache containerCache = mock(ContainerCache.class); + private final ClusterHealthCheck clusterHealthCheck = mock(ClusterHealthCheck.class); + + private final Cluster cluster = Cluster.builder() + .containerCache(containerCache) + .ip(IP) + .build(); + + @Rule public ExpectedException exception = ExpectedException.none(); + + + @Test public void + return_when_a_cluster_is_ready() throws InterruptedException { + when(clusterHealthCheck.isClusterHealthy(cluster)).thenReturn(success()); + ClusterWait wait = new ClusterWait(clusterHealthCheck, DURATION); + wait.waitUntilReady(cluster); + } + + @Test public void + check_until_a_cluster_is_ready() throws InterruptedException { + when(clusterHealthCheck.isClusterHealthy(cluster)).thenReturn(failure("failure!"), success()); + ClusterWait wait = new ClusterWait(clusterHealthCheck, DURATION); + wait.waitUntilReady(cluster); + verify(clusterHealthCheck, times(2)).isClusterHealthy(cluster); + } + + @Test(timeout = 2000L) public void + timeout_if_the_cluster_is_not_healthy() throws InterruptedException { + when(clusterHealthCheck.isClusterHealthy(cluster)).thenReturn(failure("failure!")); + + exception.expect(IllegalStateException.class); + exception.expectMessage("failure!"); + + ClusterWait wait = new ClusterWait(clusterHealthCheck, DURATION); + + wait.waitUntilReady(cluster); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ExceptionsTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ExceptionsTests.java new file mode 100644 index 
0000000000..e6149da7d4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ExceptionsTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.Exceptions; + +public class ExceptionsTests { + @Test + public void print_out_a_condensed_version_of_the_stacktrace() { + RuntimeException exception = new RuntimeException("foo", new IllegalStateException("bar", new UnsupportedOperationException("baz"))); + assertThat(Exceptions.condensedStacktraceFor(exception), is( + "java.lang.RuntimeException: foo\n" + + "java.lang.IllegalStateException: bar\n" + + "java.lang.UnsupportedOperationException: baz" + )); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HttpHealthCheckTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HttpHealthCheckTests.java new file mode 100644 index 0000000000..743222ee32 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HttpHealthCheckTests.java @@ -0,0 +1,63 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failure;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful;
+
+import java.util.function.Function;
+import org.junit.Test;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure;
+
+public class HttpHealthCheckTests {
+	private static final Function<DockerPort, String> URL_FUNCTION = port -> null;
+	public static final int PORT = 1234;
+	private final Container container = mock(Container.class);
+
+	@Test
+	public void be_healthy_when_the_port_is_listening_over_http() {
+		whenTheContainerIsListeningOnHttpTo(PORT, URL_FUNCTION);
+
+		assertThat(
+				HealthChecks.toRespondOverHttp(PORT, URL_FUNCTION).isHealthy(container),
+				is(successful()));
+	}
+
+	@Test
+	public void be_unhealthy_when_all_ports_are_not_listening() {
+		whenTheContainerIsNotListeningOnHttpTo(PORT, URL_FUNCTION);
+
+		assertThat(
+				HealthChecks.toRespondOverHttp(PORT, URL_FUNCTION).isHealthy(container),
+				is(failure()));
+	}
+
+	private void whenTheContainerIsListeningOnHttpTo(int port, Function<DockerPort, String> urlFunction) {
+		when(container.portIsListeningOnHttp(port, urlFunction)).thenReturn(SuccessOrFailure.success());
+	}
+
+	private void whenTheContainerIsNotListeningOnHttpTo(int port, Function<DockerPort, String> urlFunction) {
+		when(container.portIsListeningOnHttp(port, urlFunction)).thenReturn(SuccessOrFailure.failure("not listening"));
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/PortsHealthCheckTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/PortsHealthCheckTests.java
new file mode 100644
index 0000000000..cdd5d2dd89
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/PortsHealthCheckTests.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
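HealthChecks.toRespondOverHttp, exercised above, turns a port plus a Function<DockerPort, String> (mapping the resolved port to the URL to probe) into a container health check. A sketch of the typical wiring; the getIp() and getExternalPort() accessors on DockerPort are assumptions here, since this hunk does not show them:

```java
import java.util.function.Function;

import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks;

class HttpHealthCheckSketch {

	// Healthy once the container's mapping of port 8080 answers HTTP.
	static HealthCheck<Container> serverResponding() {
		Function<DockerPort, String> url = port ->
				"http://" + port.getIp() + ":" + port.getExternalPort() + "/about"; // assumed accessors
		return HealthChecks.toRespondOverHttp(8080, url);
	}
}
```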
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failure;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful;
+
+import org.junit.Test;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure;
+
+public class PortsHealthCheckTests {
+	private final HealthCheck<Container> healthCheck = HealthChecks.toHaveAllPortsOpen();
+	private final Container container = mock(Container.class);
+
+	@Test
+	public void be_healthy_when_all_ports_are_listening() {
+		whenTheContainerHasAllPortsOpen();
+
+		assertThat(healthCheck.isHealthy(container), is(successful()));
+	}
+
+	@Test
+	public void be_unhealthy_when_all_ports_are_not_listening() {
+		whenTheContainerDoesNotHaveAllPortsOpen();
+
+		assertThat(healthCheck.isHealthy(container), is(failure()));
+	}
+
+	private void whenTheContainerDoesNotHaveAllPortsOpen() {
+		when(container.areAllPortsOpen()).thenReturn(SuccessOrFailure.failure("not all ports open"));
+	}
+
+	private void whenTheContainerHasAllPortsOpen() {
+		when(container.areAllPortsOpen()).thenReturn(SuccessOrFailure.success());
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureMatchers.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureMatchers.java
new file mode 100644
index 0000000000..651fbba10f
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureMatchers.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
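toHaveAllPortsOpen is the coarser sibling: it reports healthy once every exposed port accepts connections, and like the HTTP variant it returns a SuccessOrFailure so the reason travels with the verdict. For example:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks;

class PortsHealthCheckSketch {

	// True once every port the container maps is accepting connections.
	static boolean allPortsReady(Container container) {
		HealthCheck<Container> check = HealthChecks.toHaveAllPortsOpen();
		return check.isHealthy(container).succeeded();
	}
}
```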
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting;
+
+import static org.hamcrest.Matchers.anything;
+import static org.hamcrest.Matchers.equalTo;
+
+import org.hamcrest.Description;
+import org.hamcrest.FeatureMatcher;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure;
+
+public enum SuccessOrFailureMatchers {
+	;
+	public static class Successful extends TypeSafeDiagnosingMatcher<SuccessOrFailure> {
+		@Override
+		protected boolean matchesSafely(SuccessOrFailure item, Description mismatchDescription) {
+			if (item.failed()) {
+				mismatchDescription.appendValue(item);
+			}
+
+			return item.succeeded();
+		}
+
+		@Override
+		public void describeTo(Description description) {
+			description.appendText("is successful");
+		}
+	}
+
+	public static Matcher<SuccessOrFailure> successful() {
+		return new Successful();
+	}
+
+	public static class Failure extends FeatureMatcher<SuccessOrFailure, String> {
+		public Failure(Matcher<? super String> subMatcher) {
+			super(subMatcher, "failure message of", "failure message");
+		}
+
+		@Override
+		protected String featureValueOf(SuccessOrFailure actual) {
+			return actual.failureMessage();
+		}
+
+		@Override
+		protected boolean matchesSafely(SuccessOrFailure actual, Description mismatch) {
+			if (actual.succeeded()) {
+				mismatch.appendValue(actual);
+				return false;
+			}
+
+			return super.matchesSafely(actual, mismatch);
+		}
+	}
+
+	public static Matcher<SuccessOrFailure> failure() {
+		return new Failure(anything());
+	}
+
+	public static Matcher<SuccessOrFailure> failureWithMessage(Matcher<String> messageMatcher) {
+		return new Failure(messageMatcher);
+	}
+
+	public static Matcher<SuccessOrFailure> failureWithMessage(String message) {
+		return new Failure(equalTo(message));
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureTests.java
new file mode 100644
index 0000000000..f6b1213cdb
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureTests.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
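These matchers let assertions on SuccessOrFailure read as prose and, through the mismatch descriptions, print the offending value when an assertion misses. Typical use in a test:

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failureWithMessage;
import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful;

import org.junit.Test;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure;

public class SuccessOrFailureMatcherUsage {

	@Test
	public void assertions_read_naturally() {
		assertThat(SuccessOrFailure.success(), successful());
		assertThat(SuccessOrFailure.failure("port 8080 closed"), failureWithMessage("port 8080 closed"));
	}
}
```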
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.Is.is; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failure; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failureWithMessage; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful; + +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; + +public class SuccessOrFailureTests { + @Test + public void not_have_failed_if_actually_a_success() { + assertThat(SuccessOrFailure.success(), is(successful())); + } + + @Test + public void have_failed_if_actually_a_failure() { + assertThat(SuccessOrFailure.failure("oops"), is(failure())); + } + + @Test + public void return_the_failure_message_if_set() { + assertThat(SuccessOrFailure.failure("oops"), is(failureWithMessage("oops"))); + } + + @Test + public void fail_from_an_exception() { + Exception exception = new RuntimeException("oh no"); + assertThat(SuccessOrFailure.fromException(exception), + is(failureWithMessage(both( + containsString("RuntimeException")).and( + containsString("oh no") + )))); + } + + @Test + public void succeed_on_a_lambda_that_returns_true() { + SuccessOrFailure successFromLambda = SuccessOrFailure.onResultOf(() -> true); + assertThat(successFromLambda, is(successful())); + } + + @Test + public void fail_on_a_lambda_that_throws_an_exception() { + SuccessOrFailure failureFromLambda = SuccessOrFailure.onResultOf(() -> { + throw new IllegalArgumentException("oh no"); + }); + + assertThat(failureFromLambda, + is(failureWithMessage(both( + containsString("IllegalArgumentException")).and( + containsString("oh no") + )))); + } + + @Test + public void fail_on_a_lambda_that_returns_false() { + SuccessOrFailure failureFromLambda = SuccessOrFailure.onResultOf(() -> false); + + assertThat(failureFromLambda, is(failureWithMessage("Attempt to complete healthcheck failed"))); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/CommandTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/CommandTests.java new file mode 100644 index 0000000000..8f7114d763 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/CommandTests.java @@ -0,0 +1,119 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
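SuccessOrFailureTests documents the factory side of the API: onResultOf runs a boolean-returning attempt, mapping false to a canned failure message and a thrown exception to its condensed stack trace. That makes ad-hoc checks compact, for instance (a hypothetical port probe, not part of this change):

```java
import java.io.IOException;
import java.net.Socket;

import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure;

class AdHocCheckSketch {

	// Success if something accepts a TCP connection on the port; the thrown
	// exception is captured into the failure message otherwise.
	static SuccessOrFailure localPortOpen(int port) {
		return SuccessOrFailure.onResultOf(() -> {
			try (Socket socket = new Socket("localhost", port)) {
				return true;
			}
			catch (IOException e) {
				throw new IllegalStateException("port " + port + " is not listening", e);
			}
		});
	}
}
```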
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import static org.apache.commons.io.IOUtils.toInputStream;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.core.Is.is;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class CommandTests {
+	@Mock private Process executedProcess;
+	@Mock private DockerComposeExecutable dockerComposeExecutable;
+	@Mock private ErrorHandler errorHandler;
+	private Command dockerComposeCommand;
+	private final List<String> consumedLogLines = new ArrayList<>();
+	private final Consumer<String> logConsumer = s -> consumedLogLines.add(s);
+
+	@Before
+	public void before() throws IOException {
+		when(dockerComposeExecutable.execute(any())).thenReturn(executedProcess);
+		dockerComposeCommand = new Command(dockerComposeExecutable, logConsumer);
+
+		givenTheUnderlyingProcessHasOutput("");
+		givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(0);
+	}
+
+	@Test public void
+	invoke_error_handler_when_exit_code_of_the_executed_process_is_non_0() throws IOException, InterruptedException {
+		int expectedExitCode = 1;
+		givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(expectedExitCode);
+		dockerComposeCommand.execute(errorHandler, "rm", "-f");
+
+		verify(errorHandler).handle(expectedExitCode, "", "docker-compose", "rm", "-f");
+	}
+
+	@Test public void
+	not_invoke_error_handler_when_exit_code_of_the_executed_process_is_0() throws IOException, InterruptedException {
+		dockerComposeCommand.execute(errorHandler, "rm", "-f");
+
+		verifyNoMoreInteractions(errorHandler);
+	}
+
+	@Test public void
+	return_output_when_exit_code_of_the_executed_process_is_non_0() throws IOException, InterruptedException {
+		String expectedOutput = "test output";
+		givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(1);
+		givenTheUnderlyingProcessHasOutput(expectedOutput);
+		String commandOutput = dockerComposeCommand.execute(errorHandler, "rm", "-f");
+
+		assertThat(commandOutput, is(expectedOutput));
+	}
+
+	@Test public void
+	return_output_when_exit_code_of_the_executed_process_is_0() throws IOException, InterruptedException {
+		String expectedOutput = "test output";
+		givenTheUnderlyingProcessHasOutput(expectedOutput);
+		String commandOutput = dockerComposeCommand.execute(errorHandler, "rm", "-f");
+
+		assertThat(commandOutput, is(expectedOutput));
+	}
+
+	@Test public void
+	give_the_output_to_the_specified_consumer_as_it_is_available() throws IOException, InterruptedException {
+		givenTheUnderlyingProcessHasOutput("line 1\nline 2");
+
+		dockerComposeCommand.execute(errorHandler, "rm", "-f");
+
+		assertThat(consumedLogLines, contains("line 1", "line 2"));
+	}
+
+	// flaky test: https://circleci.com/gh/palantir/docker-compose-rule/378, 370, 367, 366
+	@Ignore
+	@Test public void
+	not_create_long_lived_threads_after_execution() throws IOException, InterruptedException {
+		int preThreadCount =
Thread.getAllStackTraces().entrySet().size(); + dockerComposeCommand.execute(errorHandler, "rm", "-f"); + int postThreadCount = Thread.getAllStackTraces().entrySet().size(); + assertThat("command thread pool has exited", preThreadCount == postThreadCount); + } + + private void givenTheUnderlyingProcessHasOutput(String output) { + when(executedProcess.getInputStream()).thenReturn(toInputStream(output)); + } + + private void givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(int exitCode) { + when(executedProcess.exitValue()).thenReturn(exitCode); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerComposeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerComposeTests.java new file mode 100644 index 0000000000..c81dad8c4e --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerComposeTests.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
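CommandTests fixes the Command contract: output is streamed line by line to the log consumer while the process runs, the full output is returned to the caller, and a non-zero exit status is handed to the supplied ErrorHandler together with the output and the argv that failed. A fail-fast caller might look like the following; the four-argument lambda shape for ErrorHandler is inferred from the verify(...) call above and is an assumption:

```java
import java.util.function.Consumer;

import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Command;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecutable;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.ErrorHandler;

class CommandSketch {

	// Runs "docker-compose rm -f", echoing output as it arrives and turning
	// a non-zero exit code into an exception.
	static String forceRemove(DockerComposeExecutable executable) throws Exception {
		Consumer<String> echo = System.out::println;
		ErrorHandler failFast = (exitCode, output, commandName, commands) -> { // assumed SAM shape
			throw new IllegalStateException("'" + commandName + "' returned exit code " + exitCode);
		};
		return new Command(executable, echo).execute(failFast, "rm", "-f");
	}
}
```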
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class ConflictingContainerRemovingDockerComposeTests { + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final Docker docker = mock(Docker.class); + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void require_retry_attempts_to_be_at_least_1() { + exception.expect(IllegalStateException.class); + exception.expectMessage("retryAttempts must be at least 1, was 0"); + new ConflictingContainerRemovingDockerCompose(dockerCompose, docker, 0); + } + + @Test + public void call_up_only_once_if_successful() throws IOException, InterruptedException { + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, times(1)).up(); + verifyNoMoreInteractions(docker); + } + + @Test + public void call_rm_and_retry_up_if_conflicting_containers_exist() throws IOException, InterruptedException { + String conflictingContainer = "conflictingContainer"; + doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use")).doNothing() + .when(dockerCompose).up(); + + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, times(2)).up(); + verify(docker).rm(new HashSet<>(Arrays.asList(conflictingContainer))); + } + + @Test + public void retry_specified_number_of_times() throws IOException, InterruptedException { + String conflictingContainer = "conflictingContainer"; + DockerExecutionException dockerException = new DockerExecutionException( + "The name \"" + conflictingContainer + "\" is already in use"); + doThrow(dockerException).doThrow(dockerException).doNothing().when(dockerCompose).up(); + + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker, 3); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, times(3)).up(); + verify(docker, times(2)).rm(new HashSet<>(Arrays.asList(conflictingContainer))); + } + + @Test + public void ignore_docker_execution_exceptions_in_rm() throws IOException, InterruptedException { + String conflictingContainer = "conflictingContainer"; + doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use")).doNothing() + .when(dockerCompose).up(); + doThrow(DockerExecutionException.class).when(docker).rm(anySet()); + + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, 
times(2)).up();
+		verify(docker).rm(new HashSet<>(Arrays.asList(conflictingContainer)));
+	}
+
+	@Test
+	public void fail_on_non_docker_execution_exceptions_in_rm() throws IOException, InterruptedException {
+		String conflictingContainer = "conflictingContainer";
+		doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use")).doNothing()
+				.when(dockerCompose).up();
+		doThrow(RuntimeException.class).when(docker).rm(anySet());
+
+		exception.expect(RuntimeException.class);
+		ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+				dockerCompose, docker);
+		conflictingContainerRemovingDockerCompose.up();
+	}
+
+	@Test
+	public void throw_exception_if_retry_attempts_exceeded() throws IOException, InterruptedException {
+		String conflictingContainer = "conflictingContainer";
+		doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use"))
+				.when(dockerCompose).up();
+
+		exception.expect(DockerExecutionException.class);
+		exception.expectMessage("docker-compose up failed");
+		ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+				dockerCompose, docker);
+		conflictingContainerRemovingDockerCompose.up();
+	}
+
+	@Test
+	public void parse_container_names_from_error_message() {
+		String conflictingContainer = "conflictingContainer";
+
+		ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+				dockerCompose, docker);
+		Set<String> conflictingContainerNames = conflictingContainerRemovingDockerCompose
+				.getConflictingContainerNames("The name \"" + conflictingContainer + "\" is already in use");
+
+		assertEquals(new HashSet<>(Arrays.asList(conflictingContainer)), conflictingContainerNames);
+	}
+
+	@Test
+	public void parse_container_names_from_error_message_since_v13() {
+		String conflictingContainer = "conflictingContainer";
+
+		ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+				dockerCompose, docker);
+		Set<String> conflictingContainerNames = conflictingContainerRemovingDockerCompose
+				.getConflictingContainerNames("The container name \"" + conflictingContainer + "\" is already in use");
+
+		assertEquals(new HashSet<>(Arrays.asList(conflictingContainer)), conflictingContainerNames);
+	}
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocationsTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocationsTests.java
new file mode 100644
index 0000000000..c0b3a33955
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocationsTests.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
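The decorator's behaviour, as pinned down above: up is attempted, the conflicting container names are parsed out of either variant of the daemon's "already in use" message, those containers are force-removed, and up is retried up to the configured number of attempts (the two-argument constructor falls back to a default). Wiring it up:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.ConflictingContainerRemovingDockerCompose;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;

class ConflictRemovalSketch {

	// An `up` that survives leftover containers from a previous run: on a name
	// conflict, rm the named containers and retry, here up to three attempts.
	static ConflictingContainerRemovingDockerCompose selfHealing(DockerCompose compose, Docker docker) {
		return new ConflictingContainerRemovingDockerCompose(compose, docker, 3);
	}
}
```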
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import static java.util.Optional.empty; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; + +public class DockerCommandLocationsTests { + private static final String badLocation = "file/that/does/not/exist"; + private static final String otherBadLocation = "another/file/that/does/not/exist"; + + @Rule public TemporaryFolder folder = new TemporaryFolder(); + + private String goodLocation; + + @Before + public void before() throws IOException { + goodLocation = folder.newFile("docker-compose.yml").getAbsolutePath(); + } + + @Test public void + provide_the_first_docker_command_location_if_it_exists() { + DockerCommandLocations dockerCommandLocations = new DockerCommandLocations( + badLocation, + goodLocation, + otherBadLocation); + + assertThat(dockerCommandLocations.preferredLocation().get(), + is(goodLocation)); + } + + @Test public void + skip_paths_from_environment_variables_that_are_unset() { + DockerCommandLocations dockerCommandLocations = new DockerCommandLocations( + System.getenv("AN_UNSET_DOCKER_COMPOSE_PATH"), + goodLocation); + + assertThat(dockerCommandLocations.preferredLocation().get(), + is(goodLocation)); + } + + @Test public void + have_no_preferred_path_when_all_possible_paths_are_all_invalid() { + DockerCommandLocations dockerCommandLocations = new DockerCommandLocations( + badLocation); + + assertThat(dockerCommandLocations.preferredLocation(), + is(empty())); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOptionTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOptionTests.java new file mode 100644 index 0000000000..f6488ed768 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOptionTests.java @@ -0,0 +1,31 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
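DockerCommandLocationsTests spells out the resolution rule: candidate paths are scanned in order, null entries (say, an unset environment variable) and missing files are skipped, and the result is an Optional<String>. A typical lookup chain, with an illustrative override variable:

```java
import java.util.Optional;

import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCommandLocations;

class BinaryLookupSketch {

	// First existing path wins; DOCKER_COMPOSE_LOCATION is a hypothetical
	// override and may be unset, in which case the null entry is skipped.
	static Optional<String> dockerComposePath() {
		return new DockerCommandLocations(
				System.getenv("DOCKER_COMPOSE_LOCATION"),
				"/usr/local/bin/docker-compose",
				"/usr/bin/docker-compose").preferredLocation();
	}
}
```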
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsEmptyCollection.empty; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.noOptions; + +public class DockerComposeExecOptionTests { + + @Test public void + be_constructable_with_no_args() { + DockerComposeExecOption option = noOptions(); + assertThat(option.options(), empty()); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeTests.java new file mode 100644 index 0000000000..90a9b9610c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeTests.java @@ -0,0 +1,265 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports;
+
+import static org.apache.commons.io.IOUtils.toInputStream;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.core.Is.is;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument.arguments;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.options;
+
+public class DockerComposeTests {
+
+	@Rule
+	public ExpectedException exception = ExpectedException.none();
+
+	private final DockerComposeExecutable executor = mock(DockerComposeExecutable.class);
+	private final DockerMachine dockerMachine = mock(DockerMachine.class);
+	private final DockerCompose compose = new DefaultDockerCompose(executor, dockerMachine);
+
+	private final Process executedProcess = mock(Process.class);
+	private final Container container = mock(Container.class);
+
+	@Before
+	public void before() throws IOException {
+		when(dockerMachine.getIp()).thenReturn("0.0.0.0");
+		when(executor.execute(any())).thenReturn(executedProcess);
+		when(executedProcess.getInputStream()).thenReturn(toInputStream("0.0.0.0:7000->7000/tcp"));
+		when(executedProcess.exitValue()).thenReturn(0);
+		when(container.getContainerName()).thenReturn("my-container");
+	}
+
+	@Test
+	public void call_docker_compose_up_with_daemon_flag_on_up() throws IOException, InterruptedException {
+		compose.up();
+		verify(executor).execute("up", "-d");
+	}
+
+	@Test
+	public void call_docker_compose_rm_with_force_and_volume_flags_on_rm() throws IOException, InterruptedException {
+		compose.rm();
+		verify(executor).execute("rm", "--force", "-v");
+	}
+
+	@Test
+	public void call_docker_compose_stop_on_stop() throws IOException, InterruptedException {
+		compose.stop(container);
+		verify(executor).execute("stop", "my-container");
+	}
+
+	@Test
+	public void call_docker_compose_start_on_start() throws IOException, InterruptedException {
+		compose.start(container);
+		verify(executor).execute("start", "my-container");
+	}
+
+	@Test
+	public void parse_and_returns_container_names_on_ps() throws IOException, InterruptedException {
+		when(executedProcess.getInputStream()).thenReturn(toInputStream("ps\n----\ndir_db_1"));
+		List<ContainerName> containerNames = compose.ps();
+		verify(executor).execute("ps");
+		assertThat(containerNames, contains(ContainerName.builder().semanticName("db").rawName("dir_db_1").build()));
+	}
+
+ @Test + public void call_docker_compose_with_no_colour_flag_on_logs() throws IOException { + when(executedProcess.getInputStream()).thenReturn( + toInputStream("id"), + toInputStream("docker-compose version 1.5.6, build 1ad8866"), + toInputStream("logs")); + ByteArrayOutputStream output = new ByteArrayOutputStream(); + + compose.writeLogs("db", output); + verify(executor).execute("logs", "--no-color", "db"); + assertThat(new String(output.toByteArray(), StandardCharsets.UTF_8), is("logs")); + } + + @Test + public void call_docker_compose_with_no_container_on_logs() throws IOException { + reset(executor); + final Process mockIdProcess = mock(Process.class); + when(mockIdProcess.exitValue()).thenReturn(0); + final InputStream emptyStream = toInputStream(""); + when(mockIdProcess.getInputStream()).thenReturn(emptyStream, emptyStream, emptyStream, toInputStream("id")); + + final Process mockVersionProcess = mock(Process.class); + when(mockVersionProcess.exitValue()).thenReturn(0); + when(mockVersionProcess.getInputStream()).thenReturn(toInputStream("docker-compose version 1.5.6, build 1ad8866")); + when(executor.execute("ps", "-q", "db")).thenReturn(mockIdProcess); + when(executor.execute("-v")).thenReturn(mockVersionProcess); + when(executor.execute("logs", "--no-color", "db")).thenReturn(executedProcess); + when(executedProcess.getInputStream()).thenReturn(toInputStream("logs")); + ByteArrayOutputStream output = new ByteArrayOutputStream(); + + compose.writeLogs("db", output); + verify(executor, times(4)).execute("ps", "-q", "db"); + verify(executor).execute("logs", "--no-color", "db"); + assertThat(new String(output.toByteArray(), StandardCharsets.UTF_8), is("logs")); + } + + @Test + public void call_docker_compose_with_the_follow_flag_when_the_version_is_at_least_1_7_0_on_logs() + throws IOException { + when(executedProcess.getInputStream()).thenReturn( + toInputStream("id"), + toInputStream("docker-compose version 1.7.0, build 1ad8866"), + toInputStream("logs")); + ByteArrayOutputStream output = new ByteArrayOutputStream(); + compose.writeLogs("db", output); + verify(executor).execute("logs", "--no-color", "--follow", "db"); + assertThat(new String(output.toByteArray(), StandardCharsets.UTF_8), is("logs")); + } + + @Test + public void throw_exception_when_kill_exits_with_a_non_zero_exit_code() throws IOException, InterruptedException { + when(executedProcess.exitValue()).thenReturn(1); + exception.expect(DockerExecutionException.class); + exception.expectMessage("'docker-compose kill' returned exit code 1"); + compose.kill(); + } + + @Test + public void not_throw_exception_when_down_fails_because_the_command_does_not_exist() + throws IOException, InterruptedException { + when(executedProcess.exitValue()).thenReturn(1); + when(executedProcess.getInputStream()).thenReturn(toInputStream("No such command: down")); + compose.down(); + } + + @Test + public void throw_exception_when_down_fails_for_a_reason_other_than_the_command_not_being_present() + throws IOException, InterruptedException { + when(executedProcess.exitValue()).thenReturn(1); + when(executedProcess.getInputStream()).thenReturn(toInputStream("")); + + exception.expect(DockerExecutionException.class); + + compose.down(); + } + + @Test + public void use_the_remove_volumes_flag_when_down_exists() throws IOException, InterruptedException { + compose.down(); + verify(executor).execute("down", "--volumes"); + } + + @Test + public void parse_the_ps_output_on_ports() throws IOException, InterruptedException { + Ports ports = 
compose.ports("db"); + verify(executor).execute("ps", "db"); + assertThat(ports, is(new Ports(new DockerPort("0.0.0.0", 7000, 7000)))); + } + + @Test + public void throw_illegal_state_exception_when_there_is_no_container_found_for_ports() + throws IOException, InterruptedException { + when(executedProcess.getInputStream()).thenReturn(toInputStream("")); + exception.expect(IllegalStateException.class); + exception.expectMessage("No container with name 'db' found"); + compose.ports("db"); + } + + @Test + public void pass_concatenated_arguments_to_executor_on_docker_compose_exec() + throws IOException, InterruptedException { + when(executedProcess.getInputStream()).thenReturn(toInputStream("docker-compose version 1.7.0rc1, build 1ad8866")); + compose.exec(options("-d"), "container_1", arguments("ls")); + verify(executor, times(1)).execute("exec", "-T", "-d", "container_1", "ls"); + } + + @Test + public void fail_if_docker_compose_version_is_prior_1_7_on_docker_compose_exec() + throws IOException, InterruptedException { + when(executedProcess.getInputStream()).thenReturn(toInputStream("docker-compose version 1.5.6, build 1ad8866")); + exception.expect(IllegalStateException.class); + exception.expectMessage("You need at least docker-compose 1.7 to run docker-compose exec"); + compose.exec(options("-d"), "container_1", arguments("ls")); + } + + @Test + public void pass_concatenated_arguments_to_executor_on_docker_compose_run() + throws IOException, InterruptedException { + compose.run(DockerComposeRunOption.options("-d"), "container_1", DockerComposeRunArgument.arguments("ls")); + verify(executor, times(1)).execute("run", "-d", "container_1", "ls"); + } + + @Test + public void return_the_output_from_the_executed_process_on_docker_compose_exec() throws Exception { + String lsString = String.format("-rw-r--r-- 1 user 1318458867 11326 Mar 9 17:47 LICENSE%n" + + "-rw-r--r-- 1 user 1318458867 12570 May 12 14:51 README.md"); + + String versionString = "docker-compose version 1.7.0rc1, build 1ad8866"; + + DockerComposeExecutable processExecutor = mock(DockerComposeExecutable.class); + + addProcessToExecutor(processExecutor, processWithOutput(versionString), "-v"); + addProcessToExecutor(processExecutor, processWithOutput(lsString), "exec", "-T", "container_1", "ls", "-l"); + + DockerCompose processCompose = new DefaultDockerCompose(processExecutor, dockerMachine); + + assertThat(processCompose.exec(options(), "container_1", arguments("ls", "-l")), is(lsString)); + } + + @Test + public void return_the_output_from_the_executed_process_on_docker_compose_run() throws Exception { + String lsString = String.format("-rw-r--r-- 1 user 1318458867 11326 Mar 9 17:47 LICENSE%n" + + "-rw-r--r-- 1 user 1318458867 12570 May 12 14:51 README.md"); + + DockerComposeExecutable processExecutor = mock(DockerComposeExecutable.class); + + addProcessToExecutor(processExecutor, processWithOutput(lsString), "run", "-it", "container_1", "ls", "-l"); + + DockerCompose processCompose = new DefaultDockerCompose(processExecutor, dockerMachine); + + assertThat(processCompose.run(DockerComposeRunOption.options("-it"), "container_1", DockerComposeRunArgument.arguments("ls", "-l")), is(lsString)); + } + + private static void addProcessToExecutor(DockerComposeExecutable dockerComposeExecutable, Process process, String... 
commands) throws Exception { + when(dockerComposeExecutable.execute(commands)).thenReturn(process); + } + + private static Process processWithOutput(String output) { + Process mockedProcess = mock(Process.class); + when(mockedProcess.getInputStream()).thenReturn(toInputStream(output)); + when(mockedProcess.exitValue()).thenReturn(0); + return mockedProcess; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersionTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersionTests.java new file mode 100644 index 0000000000..5aa6d89e2c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersionTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import com.github.zafarkhaja.semver.Version; +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; + + +public class DockerComposeVersionTests { + + @Test + public void compare_major_versions_first() { + assertThat(Version.valueOf("2.1.0").compareTo(Version.valueOf("1.2.1")), greaterThan(0)); + } + + @Test + public void compare_minor_versions_when_major_versions_are_the_same() { + assertThat(Version.valueOf("2.1.7").compareTo(Version.valueOf("2.3.2")), lessThan(0)); + } + + @Test + public void return_equals_for_the_same_version_strings() { + assertThat(Version.valueOf("2.1.2").compareTo(Version.valueOf("2.1.2")), is(0)); + } + + @Test + public void remove_non_digits_when_passing_version_string() { + assertThat( + DockerComposeVersion.parseFromDockerComposeVersion("docker-compose version 1.7.0rc1, build 1ad8866"), + is(Version.valueOf("1.7.0"))); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerTests.java new file mode 100644 index 0000000000..b6a8e7185a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerTests.java @@ -0,0 +1,89 @@ +/* + * Copyright 2018-2019 the original author or authors. 
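DockerComposeVersionTests leans on com.github.zafarkhaja.semver for ordering and on parseFromDockerComposeVersion to strip the non-numeric tail, so "1.7.0rc1" parses as 1.7.0. That is exactly the comparison the exec tests in DockerComposeTests above rely on; roughly:

```java
import com.github.zafarkhaja.semver.Version;

import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeVersion;

class ExecGateSketch {

	// docker-compose exec needs at least 1.7: parse the "-v" banner and
	// compare semantically rather than lexically.
	static boolean supportsExec(String versionBanner) {
		Version version = DockerComposeVersion.parseFromDockerComposeVersion(versionBanner);
		return version.compareTo(Version.valueOf("1.7.0")) >= 0;
	}
}
```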
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; + +import com.github.zafarkhaja.semver.Version; +import org.junit.Before; +import org.junit.Test; + +import static org.apache.commons.io.IOUtils.toInputStream; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class DockerTests { + + private final DockerExecutable executor = mock(DockerExecutable.class); + private final Docker docker = new Docker(executor); + + private final Process executedProcess = mock(Process.class); + + @Before + public void before() throws IOException { + when(executor.execute(any())).thenReturn(executedProcess); + when(executedProcess.exitValue()).thenReturn(0); + } + + @Test + public void call_docker_rm_with_force_flag_on_rm() throws IOException, InterruptedException { + when(executedProcess.getInputStream()).thenReturn(toInputStream("")); + + docker.rm("testContainer"); + + verify(executor).execute("rm", "-f", "testContainer"); + } + + @Test + public void call_docker_network_ls() throws IOException, InterruptedException { + String lsOutput = "0.0.0.0:7000->7000/tcp"; + when(executedProcess.getInputStream()).thenReturn(toInputStream(lsOutput)); + + assertThat(docker.listNetworks(), is(lsOutput)); + + verify(executor).execute("network", "ls"); + } + + @Test + public void call_docker_network_prune() throws IOException, InterruptedException { + String lsOutput = "0.0.0.0:7000->7000/tcp"; + when(executedProcess.getInputStream()).thenReturn(toInputStream(lsOutput)); + + assertThat(docker.pruneNetworks(), is(lsOutput)); + + verify(executor).execute("network", "prune", "--force"); + } + + @Test + public void understand_old_version_format() throws IOException, InterruptedException { + when(executedProcess.getInputStream()).thenReturn(toInputStream("Docker version 1.7.2")); + + Version version = docker.configuredVersion(); + assertThat(version, is(Version.valueOf("1.7.2"))); + } + + @Test + public void understand_new_version_format() throws IOException, InterruptedException { + when(executedProcess.getInputStream()).thenReturn(toInputStream("Docker version 17.03.1-ce")); + + Version version = docker.configuredVersion(); + assertThat(version, is(Version.valueOf("17.3.1"))); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategyTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategyTests.java new file mode 100644 index 0000000000..6ea42effae --- /dev/null +++ 
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategyTests.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.junit.Test; +import org.mockito.InOrder; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; + +public class GracefulShutdownStrategyTests { + + @Test + public void call_down_then_kill_then_rm() throws Exception { + DockerCompose dockerCompose = mock(DockerCompose.class); + Docker docker = mock(Docker.class); + + ShutdownStrategy.GRACEFUL.shutdown(dockerCompose, docker); + + InOrder inOrder = inOrder(dockerCompose, docker); + inOrder.verify(dockerCompose).down(); + inOrder.verify(dockerCompose).kill(); + inOrder.verify(dockerCompose).rm(); + inOrder.verify(docker).pruneNetworks(); + inOrder.verifyNoMoreInteractions(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategyTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategyTests.java new file mode 100644 index 0000000000..e4a815ef25 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategyTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.junit.Test; +import org.mockito.InOrder; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; + +public class KillDownShutdownStrategyTests { + + @Test + public void call_kill_then_down() throws Exception { + DockerCompose dockerCompose = mock(DockerCompose.class); + Docker docker = mock(Docker.class); + + ShutdownStrategy.KILL_DOWN.shutdown(dockerCompose, docker); + + InOrder inOrder = inOrder(dockerCompose, docker); + inOrder.verify(dockerCompose).kill(); + inOrder.verify(dockerCompose).down(); + inOrder.verify(docker).pruneNetworks(); + inOrder.verifyNoMoreInteractions(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryerTests.java new file mode 100644 index 0000000000..706f128f6e --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryerTests.java @@ -0,0 +1,115 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
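The two strategy tests differ only in ordering: GRACEFUL runs down, kill, rm and then prunes networks, while KILL_DOWN kills first and then downs, trading tidiness for speed. Both are constants on ShutdownStrategy, so choosing one is a one-liner:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;

class ShutdownSketch {

	// Pick the teardown ordering pinned down by the two tests above.
	static void tearDown(DockerCompose compose, Docker docker, boolean fast) throws Exception {
		ShutdownStrategy strategy = fast ? ShutdownStrategy.KILL_DOWN : ShutdownStrategy.GRACEFUL;
		strategy.shutdown(compose, docker);
	}
}
```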
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import org.joda.time.Duration;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.utils.MockitoMultiAnswer;
+import org.springframework.util.StopWatch;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.lessThan;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class RetryerTests {
+	@Mock private Retryer.RetryableDockerOperation<String> operation;
+	private final Retryer retryer = new Retryer(1, Duration.millis(0));
+
+	@Test
+	public void not_retry_if_the_operation_was_successful_and_return_result() throws Exception {
+		when(operation.call()).thenReturn("hi");
+
+		assertThat(retryer.runWithRetries(operation), is("hi"));
+		verify(operation).call();
+	}
+
+	@Test
+	public void should_not_pause_after_last_failure() throws Exception {
+		Retryer failFast = new Retryer(0, Duration.standardSeconds(1));
+		when(operation.call()).thenThrow(new DockerExecutionException());
+		StopWatch stopwatch = new StopWatch();
+		stopwatch.start();
+		try {
+			failFast.runWithRetries(operation);
+		} catch (DockerExecutionException e) {
+			// expected
+		}
+		stopwatch.stop();
+		assertThat(stopwatch.getTotalTimeMillis(), lessThan(1000L));
+	}
+
+	@Test
+	public void retryer_should_wait_after_failure_before_trying_again() throws Exception {
+		Retryer timeRetryer = new Retryer(1, Duration.millis(100));
+
+		StopWatch stopwatch = new StopWatch();
+		stopwatch.start();
+		when(operation.call()).thenThrow(new DockerExecutionException()).thenAnswer(i -> {
+			stopwatch.stop();
+			assertThat(stopwatch.getTotalTimeMillis(), greaterThan(100L));
+			return "success";
+		});
+
+		String result = timeRetryer.runWithRetries(operation);
+		assertThat(result, is("success"));
+	}
+
+	@Test
+	public void retry_the_operation_if_it_failed_once_and_return_the_result_of_the_next_successful_call() throws Exception {
+		when(operation.call()).thenAnswer(MockitoMultiAnswer.of(
+				firstInvocation -> {
+					throw new DockerExecutionException();
+				},
+				secondInvocation -> "hola"
+		));
+
+		assertThat(retryer.runWithRetries(operation), is("hola"));
+		verify(operation, times(2)).call();
+	}
+
+	@Test
+	public void throw_the_last_exception_when_the_operation_fails_more_times_than_the_number_of_specified_retry_attempts() throws Exception {
+		DockerExecutionException finalException = new DockerExecutionException();
+
+		when(operation.call()).thenAnswer(MockitoMultiAnswer.of(
+				firstInvocation -> {
+					throw new DockerExecutionException();
+				},
+				secondInvocation -> {
+					throw finalException;
+				}
+		));
+
+		try {
+			retryer.runWithRetries(operation);
+			fail("Should have caught exception");
+		} catch (DockerExecutionException actualException) {
+			assertThat(actualException, is(finalException));
+		}
+
+		verify(operation, times(2)).call();
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerComposeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerComposeTests.java
new file mode 100644
index 0000000000..13b70be8a0
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerComposeTests.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.TestContainerNames;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Retryer.RetryableDockerOperation;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument.arguments;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.options;
+
+public class RetryingDockerComposeTests {
+	private final DockerCompose dockerCompose = mock(DockerCompose.class);
+	private final Retryer retryer = mock(Retryer.class);
+	private final RetryingDockerCompose retryingDockerCompose = new RetryingDockerCompose(retryer, dockerCompose);
+	private final List<ContainerName> someContainerNames = TestContainerNames.of("hey");
+	private static final String CONTAINER_NAME = "container";
+
+	@Before
+	public void before() throws IOException, InterruptedException {
+		retryerJustCallsOperation();
+	}
+
+	private void retryerJustCallsOperation() throws IOException, InterruptedException {
+		when(retryer.runWithRetries(anyOperation())).thenAnswer(invocation -> {
+			Retryer.RetryableDockerOperation<?> operation = (Retryer.RetryableDockerOperation<?>) invocation.getArguments()[0];
+			return operation.call();
+		});
+	}
+
+	private static RetryableDockerOperation<?> anyOperation() {
+		return any(Retryer.RetryableDockerOperation.class);
+	}
+
+	@Test
+	public void calls_up_on_the_underlying_docker_compose() throws IOException, InterruptedException {
+		retryingDockerCompose.up();
+
+		verifyRetryerWasUsed();
+		verify(dockerCompose).up();
+		verifyNoMoreInteractions(dockerCompose);
+	}
+
+	@Test
+	public void call_ps_on_the_underlying_docker_compose_and_returns_the_same_value() throws IOException, InterruptedException {
when(dockerCompose.ps()).thenReturn(someContainerNames); + + assertThat(retryingDockerCompose.ps(), is(someContainerNames)); + + verifyRetryerWasUsed(); + verify(dockerCompose).ps(); + verifyNoMoreInteractions(dockerCompose); + } + + private void verifyRetryerWasUsed() throws IOException, InterruptedException { + verify(retryer).runWithRetries(anyOperation()); + } + + private void verifyRetryerWasNotUsed() throws IOException, InterruptedException { + verify(retryer, times(0)).runWithRetries(anyOperation()); + } + + @Test + public void calls_exec_on_the_underlying_docker_compose_and_not_invoke_retryer() throws IOException, InterruptedException { + retryingDockerCompose.exec(options("-d"), CONTAINER_NAME, arguments("ls")); + verifyRetryerWasNotUsed(); + verify(dockerCompose).exec(options("-d"), CONTAINER_NAME, arguments("ls")); + } + + @Test + public void calls_run_on_the_underlying_docker_compose_and_not_invoke_retryer() throws IOException, InterruptedException { + retryingDockerCompose.run(DockerComposeRunOption.options("-d"), CONTAINER_NAME, DockerComposeRunArgument.arguments("ls")); + verifyRetryerWasNotUsed(); + verify(dockerCompose).run(DockerComposeRunOption.options("-d"), CONTAINER_NAME, DockerComposeRunArgument.arguments("ls")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollectorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollectorTests.java new file mode 100644 index 0000000000..79022a1e78 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollectorTests.java @@ -0,0 +1,210 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.io.IOUtils; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.IOMatchers.fileContainingString; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.IOMatchers.fileWithName; + +public class FileLogCollectorTests { + + @Rule + public TemporaryFolder logDirectoryParent = new TemporaryFolder(); + @Rule + public ExpectedException exception = ExpectedException.none(); + + private final DockerCompose compose = mock(DockerCompose.class); + private File logDirectory; + private LogCollector logCollector; + + @Before + public void before() throws IOException { + logDirectory = logDirectoryParent.newFolder(); + logCollector = new FileLogCollector(logDirectory); + } + + @Test + public void throw_exception_when_created_with_file_as_the_log_directory() throws IOException { + File file = logDirectoryParent.newFile("cannot-use"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("cannot be a file"); + + new FileLogCollector(file); + } + + @Test + public void create_the_log_directory_if_it_does_not_already_exist() { + File doesNotExistYetDirectory = logDirectoryParent.getRoot() + .toPath() + .resolve("doesNotExist") + .toFile(); + new FileLogCollector(doesNotExistYetDirectory); + assertThat(doesNotExistYetDirectory.exists(), is(true)); + } + + @Test + public void throw_exception_when_created_if_the_log_directory_does_not_exist_and_cannot_be_created() { + File cannotBeCreatedDirectory = cannotBeCreatedDirectory(); + + exception.expect(IllegalArgumentException.class); + exception.expectMessage("Error making"); + exception.expectMessage(cannotBeCreatedDirectory.getAbsolutePath()); + + new FileLogCollector(cannotBeCreatedDirectory); + } + + @Test + public void not_collect_any_logs_when_no_containers_are_running() throws IOException, InterruptedException { + when(compose.services()).thenReturn(Collections.emptyList()); + logCollector.startCollecting(compose); + logCollector.stopCollecting(); + assertThat(logDirectory.list(), is(emptyArray())); + } + + @Test + public void collect_logs_when_one_container_is_running_and_terminates_before_start_collecting_is_run() + throws Exception { + when(compose.services()).thenReturn(Collections.singletonList("db")); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + return false; + }); + 
logCollector.startCollecting(compose); + logCollector.stopCollecting(); + assertThat(logDirectory.listFiles(), arrayContaining(fileWithName("db.log"))); + assertThat(new File(logDirectory, "db.log"), is(fileContainingString("log"))); + } + + @Test + public void collect_logs_when_one_container_is_running_and_does_not_terminate_until_after_start_collecting_is_run() + throws Exception { + when(compose.services()).thenReturn(Collections.singletonList("db")); + CountDownLatch latch = new CountDownLatch(1); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + if (!latch.await(1, TimeUnit.SECONDS)) { + throw new RuntimeException("Latch was not triggered"); + } + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + return false; + }); + logCollector.startCollecting(compose); + latch.countDown(); + logCollector.stopCollecting(); + assertThat(logDirectory.listFiles(), arrayContaining(fileWithName("db.log"))); + assertThat(new File(logDirectory, "db.log"), is(fileContainingString("log"))); + } + + @Test + public void collect_logs_when_one_container_is_running_and_does_not_terminate() + throws IOException, InterruptedException { + when(compose.services()).thenReturn(Collections.singletonList("db")); + CountDownLatch latch = new CountDownLatch(1); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + try { + latch.await(1, TimeUnit.SECONDS); + fail("Latch was not triggered"); + } catch (InterruptedException e) { + // Success + return true; + } + fail("Latch was not triggered"); + return false; + }); + logCollector.startCollecting(compose); + logCollector.stopCollecting(); + assertThat(logDirectory.listFiles(), arrayContaining(fileWithName("db.log"))); + assertThat(new File(logDirectory, "db.log"), is(fileContainingString("log"))); + latch.countDown(); + } + + @Test + public void collect_logs_in_parallel_for_two_containers() throws IOException, InterruptedException { + when(compose.services()).thenReturn(Arrays.asList("db", "db2")); + CountDownLatch dbLatch = new CountDownLatch(1); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + dbLatch.countDown(); + return true; + }); + CountDownLatch db2Latch = new CountDownLatch(1); + when(compose.writeLogs(eq("db2"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("other", outputStream); + db2Latch.countDown(); + return true; + }); + + logCollector.startCollecting(compose); + assertThat(dbLatch.await(1, TimeUnit.SECONDS), is(true)); + assertThat(db2Latch.await(1, TimeUnit.SECONDS), is(true)); + + assertThat(logDirectory.listFiles(), arrayContainingInAnyOrder(fileWithName("db.log"), fileWithName("db2.log"))); + assertThat(new File(logDirectory, "db.log"), is(fileContainingString("log"))); + assertThat(new File(logDirectory, "db2.log"), is(fileContainingString("other"))); + + logCollector.stopCollecting(); + } + + @Test + public void throw_exception_when_trying_to_start_a_started_collector_a_second_time() + throws IOException, InterruptedException { + when(compose.services()).thenReturn(Collections.singletonList("db")); + logCollector.startCollecting(compose); + exception.expect(RuntimeException.class); + 
exception.expectMessage("Cannot start collecting the same logs twice"); + logCollector.startCollecting(compose); + } + + private static File cannotBeCreatedDirectory() { + File cannotBeCreatedDirectory = mock(File.class); + when(cannotBeCreatedDirectory.isFile()).thenReturn(false); + when(cannotBeCreatedDirectory.mkdirs()).thenReturn(false); + when(cannotBeCreatedDirectory.getAbsolutePath()).thenReturn("cannot/exist/directory"); + return cannotBeCreatedDirectory; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectoryTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectoryTest.java new file mode 100644 index 0000000000..016435eabb --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectoryTest.java @@ -0,0 +1,33 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +public class LogDirectoryTest { + + + @Test + public void gradleDockerLogsDirectory_should_use_class_simple_name() { + String directory = LogDirectory.gradleDockerLogsDirectory(SomeTestClass.class); + assertThat(directory, is("build/dockerLogs/SomeTestClass")); + } + + private static class SomeTestClass {} +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcherTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcherTests.java new file mode 100644 index 0000000000..5b8a3992d3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcherTests.java @@ -0,0 +1,52 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.matchers;
+
+import static java.util.Collections.emptyList;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.AvailablePortMatcher.areAvailable;
+
+import java.util.Arrays;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
+
+public class AvailablePortMatcherTests {
+
+	@Rule
+	public ExpectedException exception = ExpectedException.none();
+
+	@Test
+	public void succeed_when_there_are_no_unavailable_ports() {
+		List<DockerPort> unavailablePorts = emptyList();
+		assertThat(unavailablePorts, areAvailable());
+	}
+
+	@Test
+	public void throw_exception_when_there_are_some_unavailable_ports() {
+		List<DockerPort> unavailablePorts = Arrays.asList(new DockerPort("0.0.0.0", 1234, 1234),
+				new DockerPort("1.2.3.4", 2345, 3456));
+		exception.expect(AssertionError.class);
+		exception.expectMessage("For host with ip address: 0.0.0.0");
+		exception.expectMessage("external port '1234' mapped to internal port '1234' was unavailable");
+		exception.expectMessage("For host with ip address: 1.2.3.4");
+		exception.expectMessage("external port '2345' mapped to internal port '3456' was unavailable");
+		assertThat(unavailablePorts, areAvailable());
+	}
+
+}
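The matcher tested above produces one failure line per dead port, which is why the test asserts on message fragments rather than a single string. A hedged sketch of typical usage — DockerPort#isListeningNow() is assumed here from the upstream docker-compose-rule API this module derives from, and is not shown in this patch:

```java
import java.util.List;
import java.util.stream.Collectors;

import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.AvailablePortMatcher.areAvailable;

class PortCheckSketch {

	// Collect the ports that fail a socket probe, then assert the collection is
	// empty; on failure the matcher lists every unreachable port at once.
	static void assertAllListening(List<DockerPort> ports) {
		List<DockerPort> unavailable = ports.stream()
				.filter(port -> !port.isListeningNow()) // assumed probe method
				.collect(Collectors.toList());
		assertThat(unavailable, areAvailable());
	}
}
```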
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/DockerMachineEnvironmentMatcher.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/DockerMachineEnvironmentMatcher.java
new file mode 100644
index 0000000000..dd32890519
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/DockerMachineEnvironmentMatcher.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.matchers;
+
+import static java.util.stream.Collectors.toMap;
+import static org.hamcrest.collection.IsMapContaining.hasEntry;
+
+import java.util.HashMap;
+import java.util.Map;
+import org.hamcrest.Description;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;
+
+public class DockerMachineEnvironmentMatcher extends ValueCachingMatcher<DockerMachine> {
+
+	private final Map<String, String> expected;
+
+	public DockerMachineEnvironmentMatcher(Map<String, String> expected) {
+		this.expected = expected;
+	}
+
+	@Override
+	public void describeTo(Description description) {
+		description.appendText("Docker Machine to have these environment variables:\n");
+		description.appendValue(expected);
+	}
+
+	@Override
+	protected boolean matchesSafely() {
+		return missingEnvironmentVariables().isEmpty();
+	}
+
+	@Override
+	protected void describeMismatchSafely(DockerMachine item, Description mismatchDescription) {
+		mismatchDescription.appendText("\nThese environment variables were missing:\n");
+		mismatchDescription.appendValue(missingEnvironmentVariables());
+	}
+
+	public static DockerMachineEnvironmentMatcher containsEnvironment(Map<String, String> environment) {
+		return new DockerMachineEnvironmentMatcher(new HashMap<>(environment));
+	}
+
+	private Map<String, String> missingEnvironmentVariables() {
+		Map<String, String> environment = value().configuredDockerComposeProcess()
+				.environment();
+		return expected.entrySet()
+				.stream()
+				.filter(required -> !hasEntry(required.getKey(), required.getValue()).matches(environment))
+				.collect(toMap(Map.Entry::getKey, Map.Entry::getValue));
+	}
+
+}
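A minimal usage sketch for the containsEnvironment factory above; the environment entry is illustrative rather than taken from the patch:

```java
import static java.util.Collections.singletonMap;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.DockerMachineEnvironmentMatcher.containsEnvironment;

import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;

class EnvironmentAssertSketch {

	// Passes only if the machine's configured docker-compose process environment
	// contains the given entry; on mismatch the missing entries are printed.
	static void assertHasTlsEnv(DockerMachine machine) {
		assertThat(machine, containsEnvironment(singletonMap("DOCKER_TLS_VERIFY", "1")));
	}
}
```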
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/IOMatchers.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/IOMatchers.java
new file mode 100644
index 0000000000..8869f45f7f
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/IOMatchers.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.matchers;
+
+import static org.hamcrest.Matchers.containsString;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.regex.Pattern;
+import org.apache.commons.io.FileUtils;
+import org.hamcrest.Description;
+import org.hamcrest.FeatureMatcher;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+public final class IOMatchers {
+	private IOMatchers() {}
+
+	public static Matcher<File> hasFiles(int numberOfFiles) {
+		return new ValueCachingMatcher<File>() {
+			private String[] files = new String[0];
+
+			@Override
+			public void describeTo(Description description) {
+				description.appendText("directory ")
+						.appendValue(value())
+						.appendText(" to have " + numberOfFiles + " files");
+			}
+
+			@Override
+			protected void describeMismatchSafely(File item, Description mismatchDescription) {
+				mismatchDescription.appendText("directory ")
+						.appendValue(item)
+						.appendText(" had " + files.length + " files ")
+						.appendText(Arrays.toString(files))
+						.appendText(" or is not a directory");
+			}
+
+			@Override
+			protected boolean matchesSafely() {
+				files = value().list() != null ? value().list() : new String[0];
+				return files.length == numberOfFiles;
+			}
+		};
+	}
+
+	public static Matcher<File> fileWithName(String filename) {
+		return new ValueCachingMatcher<File>() {
+
+			@Override
+			public void describeTo(Description description) {
+				description.appendText("file with name " + filename);
+			}
+
+			@Override
+			protected void describeMismatchSafely(File item, Description mismatchDescription) {
+				mismatchDescription.appendText("file ")
+						.appendValue(item)
+						.appendText(" did not have name " + filename);
+			}
+
+			@Override
+			protected boolean matchesSafely() {
+				return value().getName().equals(filename);
+			}
+		};
+	}
+
+	public static Matcher<File> fileContainingString(String contents) {
+		return fileWithConents(containsString(contents));
+	}
+
+	public static Matcher<String> matchingPattern(String patternStr) {
+		return new TypeSafeDiagnosingMatcher<String>() {
+			@Override
+			protected boolean matchesSafely(String text, Description mismatchDescription) {
+				Pattern pattern = Pattern.compile(patternStr, Pattern.DOTALL);
+				boolean matches = pattern.matcher(text).matches();
+				if (!matches) {
+					mismatchDescription.appendText(text);
+				}
+				return matches;
+			}
+
+			@Override
+			public void describeTo(Description description) {
+				description.appendText("matching '" + patternStr + "'");
+			}
+		};
+	}
+
+	public static Matcher<File> fileWithConents(Matcher<String> contentsMatcher) {
+		return new FeatureMatcher<File, String>(contentsMatcher, "file contents", "file contents") {
+
+			@Override
+			protected String featureValueOf(File file) {
+				try {
+					return FileUtils.readFileToString(file, StandardCharsets.UTF_8);
+				} catch (IOException e) {
+					throw new RuntimeException(e);
+				}
+			}
+		};
+	}
+
+	public static Matcher<File> fileExists() {
+		return new ValueCachingMatcher<File>() {
+			@Override
+			public void describeTo(Description description) {
+				description.appendText("file ")
+						.appendValue(value())
+						.appendText(" to exist");
+			}
+
+			@Override
+			protected void describeMismatchSafely(File item, Description mismatchDescription) {
+				mismatchDescription.appendText("file ")
+						.appendValue(item.getAbsolutePath())
+						.appendText(" did not exist");
+			}
+
+			@Override
+			protected boolean matchesSafely() {
+				return value().exists();
+			}
+		};
+	}
+
+	public static Matcher<File> isDirectory() {
+		return new ValueCachingMatcher<File>() {
+			@Override
+			public void describeTo(Description description) {
+				description.appendValue(value())
+						.appendText(" is directory");
+			}
+
+			@Override
+			protected void describeMismatchSafely(File item, Description mismatchDescription) {
+				mismatchDescription.appendValue(item.getAbsolutePath())
+						.appendText(" is not a directory");
+			}
+
+			@Override
+			protected boolean matchesSafely() {
+				return value().isDirectory();
+			}
+		};
+	}
+
+	public static Matcher<Path> pathFileExists() {
+		return new ValueCachingMatcher<Path>() {
+			@Override
+			public void describeTo(Description description) {
+				description.appendText("file ")
+						.appendValue(value())
+						.appendText(" to exist");
+			}
+
+			@Override
+			protected void describeMismatchSafely(Path item, Description mismatchDescription) {
+				mismatchDescription.appendText("file ")
+						.appendValue(item)
+						.appendText(" did not exist");
+			}
+
+			@Override
+			protected boolean matchesSafely() {
+				return value().toFile().exists();
+			}
+		};
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/ValueCachingMatcher.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/ValueCachingMatcher.java
new file mode 100644
index 0000000000..59b7efc267
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/ValueCachingMatcher.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.matchers;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeMatcher;
+
+public abstract class ValueCachingMatcher<T> extends TypeSafeMatcher<T> {
+	private T cachedValue;
+
+	@Override
+	protected abstract void describeMismatchSafely(T item, Description mismatchDescription);
+
+	@Override
+	protected boolean matchesSafely(T value) {
+		cachedValue = value;
+		return matchesSafely();
+	}
+
+	protected abstract boolean matchesSafely();
+
+	public T value() {
+		return cachedValue;
+	}
+}
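The point of caching the matched value: a plain TypeSafeMatcher's describeTo() has no access to the item under test, whereas ValueCachingMatcher subclasses can echo it in the expectation text — at the cost of a stateful, effectively single-use matcher. A minimal illustrative subclass, assuming only the class defined above:

```java
import java.io.File;

import org.hamcrest.Description;
import org.hamcrest.Matcher;

import org.springframework.cloud.dataflow.common.test.docker.compose.matchers.ValueCachingMatcher;

class ReadableFileMatcherSketch {

	// Because matchesSafely(T) stored the item first, describeTo() can print the
	// actual file the matcher was invoked on.
	static Matcher<File> isReadable() {
		return new ValueCachingMatcher<File>() {
			@Override
			public void describeTo(Description description) {
				description.appendText("file ").appendValue(value()).appendText(" to be readable");
			}

			@Override
			protected void describeMismatchSafely(File item, Description mismatchDescription) {
				mismatchDescription.appendValue(item.getAbsolutePath()).appendText(" was not readable");
			}

			@Override
			protected boolean matchesSafely() {
				return value().canRead();
			}
		};
	}
}
```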
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/utils/MockitoMultiAnswer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/utils/MockitoMultiAnswer.java
new file mode 100644
index 0000000000..76a8a576dd
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/utils/MockitoMultiAnswer.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.utils;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.function.Function;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+public class MockitoMultiAnswer<T> implements Answer<T> {
+	private final List<Function<InvocationOnMock, T>> invocationHandlers;
+	private int numberOfTimesCalled = 0;
+
+	public MockitoMultiAnswer(List<Function<InvocationOnMock, T>> invocationHandlers) {
+		this.invocationHandlers = new ArrayList<>(invocationHandlers);
+	}
+
+	@SafeVarargs
+	public static <T> MockitoMultiAnswer<T> of(Function<InvocationOnMock, T>... invocationHandlers) {
+		return new MockitoMultiAnswer<>(Arrays.asList(invocationHandlers));
+	}
+
+	@Override
+	public T answer(InvocationOnMock invocation) throws Throwable {
+		if (numberOfTimesCalled >= invocationHandlers.size()) {
+			throw new RuntimeException("Called more times than supported");
+		}
+
+		Function<InvocationOnMock, T> invocationHandler = invocationHandlers.get(numberOfTimesCalled);
+		numberOfTimesCalled++;
+		return invocationHandler.apply(invocation);
+	}
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/docker-compose-cp1.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/docker-compose-cp1.yaml
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/native-healthcheck.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/native-healthcheck.yaml
new file mode 100644
index 0000000000..e7d566f8db
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/native-healthcheck.yaml
@@ -0,0 +1,9 @@
+services:
+  withHealthcheck:
+    image: gliderlabs/alpine:3.4
+    command: sh -c 'while true; do sleep 10; done'
+    healthcheck:
+      test: ["CMD", "test", "-f", "healthy"]
+      interval: 100ms
+      timeout: 1s
+      retries: 1
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/no-healthcheck.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/no-healthcheck.yaml
new file mode 100644
index 0000000000..0006d008ca
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/no-healthcheck.yaml
@@ -0,0 +1,4 @@
+services:
+  noHealthcheck:
+    image: gliderlabs/alpine:3.4
+    command: sh -c 'while true; do sleep 10; done'
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/compose/docker-compose-cp2.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/compose/docker-compose-cp2.yaml
new file mode 100644
index 0000000000..e69de29bb2
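MockitoMultiAnswer, defined earlier in this patch, supplies a different stubbed behaviour per invocation — the shape RetryerTests uses to drive one failure followed by a recovery. A hypothetical stubbing sketch (the mocked Callable and exception are illustrative, not from the patch):

```java
import java.util.concurrent.Callable;

import org.springframework.cloud.dataflow.common.test.docker.compose.utils.MockitoMultiAnswer;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class MultiAnswerSketch {

	// First call throws, second call returns "ok"; a third call would fail with
	// "Called more times than supported".
	@SuppressWarnings("unchecked")
	static Callable<String> flakyOperation() throws Exception {
		Callable<String> operation = mock(Callable.class);
		when(operation.call()).thenAnswer(MockitoMultiAnswer.of(
				firstCall -> { throw new IllegalStateException("flaky"); },
				secondCall -> "ok"
		));
		return operation;
	}
}
```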
diff --git a/spring-cloud-dataflow-completion/pom.xml b/spring-cloud-dataflow-completion/pom.xml
index 5a71d70fb1..77fa794645 100644
--- a/spring-cloud-dataflow-completion/pom.xml
+++ b/spring-cloud-dataflow-completion/pom.xml
@@ -1,24 +1,35 @@
-<project …>
+<project …>
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<groupId>org.springframework.cloud</groupId>
 		<artifactId>spring-cloud-dataflow-parent</artifactId>
-		<version>2.9.2-SNAPSHOT</version>
+		<version>2.11.6-SNAPSHOT</version>
+		<relativePath>../spring-cloud-dataflow-parent</relativePath>
 	</parent>
 	<artifactId>spring-cloud-dataflow-completion</artifactId>
+	<name>spring-cloud-dataflow-completion</name>
+	<description>Spring Cloud Data Flow Completion</description>
+	<properties>
+		true
+		<maven-javadoc-plugin.version>3.4.1</maven-javadoc-plugin.version>
+	</properties>
 	<dependencies>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-core</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-registry</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-configuration-metadata</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.boot</groupId>
@@ -26,4 +37,36 @@
 			<scope>test</scope>
 		</dependency>
 	</dependencies>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-javadoc-plugin</artifactId>
+				<version>${maven-javadoc-plugin.version}</version>
+				<executions>
+					<execution>
+						<id>javadoc</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-source-plugin</artifactId>
+				<version>3.3.0</version>
+				<executions>
+					<execution>
+						<id>source</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
 </project>
diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/BootVersionsCompletionProviderTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/BootVersionsCompletionProviderTests.java
deleted file mode 100644
index 9a7a8bdfea..0000000000
--- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/BootVersionsCompletionProviderTests.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright 2019-2021 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -package org.springframework.cloud.dataflow.completion; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.mock.mockito.MockBean; -import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService; -import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; -import org.springframework.cloud.dataflow.configuration.metadata.BootApplicationConfigurationMetadataResolver; -import org.springframework.cloud.dataflow.configuration.metadata.container.ContainerImageMetadataResolver; -import org.springframework.cloud.dataflow.configuration.metadata.container.DefaultContainerImageMetadataResolver; -import org.springframework.cloud.dataflow.container.registry.ContainerRegistryService; -import org.springframework.cloud.dataflow.core.AppRegistration; -import org.springframework.cloud.dataflow.core.ApplicationType; -import org.springframework.cloud.dataflow.core.DefaultStreamDefinitionService; -import org.springframework.cloud.dataflow.core.StreamDefinitionService; -import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository; -import org.springframework.cloud.dataflow.registry.service.AppRegistryService; -import org.springframework.cloud.dataflow.registry.service.DefaultAppRegistryService; -import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; -import org.springframework.cloud.deployer.resource.maven.MavenProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.FileSystemResourceLoader; -import org.springframework.test.context.junit4.SpringRunner; - -import static org.hamcrest.CoreMatchers.hasItems; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; - -/** - * Tests that the completion mechanism knows how to cope with different versions of Spring - * Boot, including using reflection on classes packaged in the boot archive when needed - * (e.g. enum values completion). 
- *
- * @author Eric Bottard
- * @author Christian Tzolov
- */
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = { CompletionConfiguration.class,
-		BootVersionsCompletionProviderTests.Mocks.class }, properties = {
-				"spring.main.allow-bean-definition-overriding=true" })
-@SuppressWarnings("unchecked")
-public class BootVersionsCompletionProviderTests {
-
-	@Autowired
-	private StreamCompletionProvider completionProvider;
-
-	@Test
-	public void testBoot13Layout() {
-		List<CompletionProposal> result = completionProvider.complete("boot13 --", 0);
-		assertThat(result, hasItems(Proposals.proposalThat(is("boot13 --level=")), Proposals.proposalThat(is("boot13 --number=")),
-				Proposals.proposalThat(is("boot13 --some-string="))));
-
-		// Test that custom classes can also be loaded correctly
-		result = completionProvider.complete("boot13 --level=", 0);
-		assertThat(result, hasItems(Proposals.proposalThat(is("boot13 --level=low")), Proposals.proposalThat(is("boot13 --level=high"))));
-
-		result = completionProvider.complete("boot13 --number=", 0);
-		assertThat(result, hasItems(Proposals.proposalThat(is("boot13 --number=one")), Proposals.proposalThat(is("boot13 --number=two"))));
-	}
-
-	@Test
-	public void testBoot14Layout() {
-		List<CompletionProposal> result = completionProvider.complete("boot14 --", 0);
-		assertThat(result, hasItems(Proposals.proposalThat(is("boot14 --level=")), Proposals.proposalThat(is("boot14 --number=")),
-				Proposals.proposalThat(is("boot14 --some-string="))));
-
-		// Test that custom classes can also be loaded correctly
-		result = completionProvider.complete("boot14 --level=", 0);
-		assertThat(result,
-				hasItems(Proposals.proposalThat(is("boot14 --level=very_low")), Proposals.proposalThat(is("boot14 --level=very_high"))));
-
-		result = completionProvider.complete("boot14 --number=", 0);
-		assertThat(result, hasItems(Proposals.proposalThat(is("boot14 --number=one")), Proposals.proposalThat(is("boot14 --number=two"))));
-
-	}
-
-	/**
-	 * A set of mocks that consider the contents of the {@literal boot_versions/}
-	 * directory as app archives.
-	 *
-	 * @author Eric Bottard
-	 * @author Mark Fisher
-	 */
-	@Configuration
-	public static class Mocks {
-
-		private static final File ROOT = new File("src/test/resources",
-				BootVersionsCompletionProviderTests.Mocks.class.getPackage().getName().replace('.', '/')
-						+ "/boot_versions");
-
-		@MockBean
-		private DefaultContainerImageMetadataResolver containerImageMetadataResolver;
-
-		@Bean
-		@ConditionalOnMissingBean
-		public StreamDefinitionService streamDefinitionService() {
-			return new DefaultStreamDefinitionService();
-		}
-
-		@Bean
-		public AppRegistryService appRegistry() {
-
-			return new DefaultAppRegistryService(mock(AppRegistrationRepository.class),
-					new AppResourceCommon(new MavenProperties(), new FileSystemResourceLoader()),
-					mock(DefaultAuditRecordService.class)) {
-
-				@Override
-				public boolean appExist(String name, ApplicationType type) {
-					return false;
-				}
-
-				@Override
-				public List<AppRegistration> findAll() {
-					List<AppRegistration> result = new ArrayList<>();
-					result.add(find("boot13", ApplicationType.source));
-					result.add(find("boot14", ApplicationType.source));
-					return result;
-				}
-
-				@Override
-				public AppRegistration find(String name, ApplicationType type) {
-					String filename = name + "-1.0.0.BUILD-SNAPSHOT.jar";
-					File file = new File(ROOT, filename);
-					if (file.exists()) {
-						return new AppRegistration(name, type, file.toURI(), file.toURI());
-					}
-					else {
-						return null;
-					}
-				}
-
-				@Override
-				public AppRegistration save(AppRegistration app) {
-					return null;
-				}
-
-				@Override
-				protected boolean isOverwrite(AppRegistration app, boolean overwrite) {
-					return false;
-				}
-			};
-		}
-
-		@MockBean
-		ContainerRegistryService containerRegistryService;
-
-		@Bean
-		public ContainerImageMetadataResolver containerImageMetadataResolver(ContainerRegistryService containerRegistryService) {
-			return new DefaultContainerImageMetadataResolver(containerRegistryService);
-		}
-
-		@Bean
-		public ApplicationConfigurationMetadataResolver metadataResolver() {
-			return new BootApplicationConfigurationMetadataResolver(
-					StreamCompletionProviderTests.class.getClassLoader(), containerImageMetadataResolver);
-		}
-	}
-}
diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java
index 2969578576..a42b8a2013 100644
--- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java
+++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java
@@ -16,19 +16,20 @@
 package org.springframework.cloud.dataflow.completion;
 
-import org.junit.Assert;
-import org.junit.Test;
+
+import org.junit.jupiter.api.Test;
 
 import org.springframework.cloud.dataflow.core.DefaultStreamDefinitionService;
 import org.springframework.cloud.dataflow.core.StreamDefinition;
 import org.springframework.cloud.dataflow.core.StreamDefinitionService;
 
-import static org.hamcrest.core.Is.is;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /**
  * Unit tests for CompletionUtils.
* * @author Eric Bottard + * @author Corneil du Plessis */ public class CompletionUtilsTests { @@ -37,16 +38,16 @@ public class CompletionUtilsTests { @Test public void testLabelQualification() { StreamDefinition streamDefinition = new StreamDefinition("foo", "http | filter"); - Assert.assertThat(CompletionUtils.maybeQualifyWithLabel("filter", - this.streamDefinitionService.getAppDefinitions(streamDefinition)), is("filter2: filter")); + assertThat(CompletionUtils.maybeQualifyWithLabel("filter", + this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("filter2: filter"); streamDefinition = new StreamDefinition("foo", "http | filter"); - Assert.assertThat(CompletionUtils.maybeQualifyWithLabel("transform", - this.streamDefinitionService.getAppDefinitions(streamDefinition)), is("transform")); + assertThat(CompletionUtils.maybeQualifyWithLabel("transform", + this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("transform"); streamDefinition = new StreamDefinition("foo", "http | filter | filter2: filter"); - Assert.assertThat(CompletionUtils.maybeQualifyWithLabel("filter", - this.streamDefinitionService.getAppDefinitions(streamDefinition)), is("filter3: filter")); + assertThat(CompletionUtils.maybeQualifyWithLabel("filter", + this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("filter3: filter"); } } diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java index d5fd3be480..c1ba1eb878 100644 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java +++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java @@ -16,20 +16,50 @@ package org.springframework.cloud.dataflow.completion; -import org.hamcrest.FeatureMatcher; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.assertj.core.api.Condition; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * Contains helper Hamcrest matchers for testing completion proposal related code. 
 *
 * @author Eric Bottard
+ * @author Corneil du Plessis
 */
 class Proposals {
-	static org.hamcrest.Matcher<CompletionProposal> proposalThat(org.hamcrest.Matcher<String> matcher) {
-		return new FeatureMatcher<CompletionProposal, String>(matcher, "a proposal whose text", "text") {
-			@Override
-			protected String featureValueOf(CompletionProposal actual) {
-				return actual.getText();
-			}
-		};
+	private static final Logger log = LoggerFactory.getLogger(Proposals.class);
+
+	static Condition<CompletionProposal> proposalThatIs(String text) {
+		return new Condition<>(item -> text.equals(item.getText()), "proposalThatIs");
+	}
+	static Condition<CompletionProposal> proposalThatStartsWith(String text) {
+		return new Condition<>(item -> item.getText().startsWith(text), "proposalThatStartsWith");
+	}
+	public static Condition<List<CompletionProposal>> proposalThatHas(boolean all, String ...text) {
+		Set<String> texts = new HashSet<>(Arrays.asList(text));
+		if(all) {
+			return new Condition<>(items -> {
+				Set<String> itemStrings = items.stream().map(completionProposal -> completionProposal.getText()).collect(Collectors.toSet());
+				return texts.stream().allMatch(txt -> itemStrings.contains(txt));
+			},"proposalThatHasAll");
+		} else {
+			return new Condition<>(items -> {
+				Set<String> itemStrings = items.stream().map(completionProposal -> completionProposal.getText()).collect(Collectors.toSet());
+				return texts.stream().anyMatch(txt -> itemStrings.contains(txt));
+			}, "proposalThatHasAny");
+		}
+	}
+	public static Condition<List<CompletionProposal>> proposalThatHasAll(String ...text) {
+		return proposalThatHas(true, text);
+	}
+	public static Condition<List<CompletionProposal>> proposalThatHasAny(String ...text) {
+		return proposalThatHas(false, text);
 	}
 }
diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java
index 7f105a46a8..a5fc88c109 100644
--- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java
+++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java
@@ -16,20 +16,16 @@
 package org.springframework.cloud.dataflow.completion;
 
-import org.junit.Test;
-import org.junit.runner.RunWith;
+
+import org.assertj.core.api.Condition;
+import org.junit.jupiter.api.Test;
 
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.hasItems;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.startsWith;
-import static org.junit.Assert.assertThat;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /**
  * Integration tests for StreamCompletionProvider.
@@ -42,8 +38,8 @@ * * @author Eric Bottard * @author Mark Fisher + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) @SpringBootTest(classes = { CompletionConfiguration.class, CompletionTestsMocks.class }, properties = { "spring.main.allow-bean-definition-overriding=true" }) @SuppressWarnings("unchecked") @@ -55,141 +51,128 @@ public class StreamCompletionProviderTests { @Test // => file,http,etc public void testEmptyStartShouldProposeSourceOrUnboundApps() { - assertThat(completionProvider.complete("", 1), hasItems(Proposals.proposalThat(is("orange")), - Proposals.proposalThat(is("http")), Proposals.proposalThat(is("hdfs")))); - assertThat(completionProvider.complete("", 1), not(hasItems(Proposals.proposalThat(is("log"))))); + assertThat(completionProvider.complete("", 1)).has(Proposals.proposalThatHasAny("orange", "http", "hdfs")); + assertThat(completionProvider.complete("", 1)).doNotHave(Proposals.proposalThatIs("log")); } @Test // fi => file public void testUnfinishedAppNameShouldReturnCompletions() { - assertThat(completionProvider.complete("h", 1), hasItems(Proposals.proposalThat(is("http")), Proposals.proposalThat(is("hdfs")))); - assertThat(completionProvider.complete("ht", 1), hasItems(Proposals.proposalThat(is("http")))); - assertThat(completionProvider.complete("ht", 1), not(hasItems(Proposals.proposalThat(is("hdfs"))))); + assertThat(completionProvider.complete("h", 1)).has(Proposals.proposalThatHasAny("http", "hdfs")); + assertThat(completionProvider.complete("ht", 1)).have(Proposals.proposalThatIs("http")); + assertThat(completionProvider.complete("ht", 1)).doNotHave(Proposals.proposalThatIs("hdfs")); } @Test public void testUnfinishedUnboundAppNameShouldReturnCompletions2() { - assertThat(completionProvider.complete("", 1), hasItems(Proposals.proposalThat(is("orange")))); - assertThat(completionProvider.complete("o", 1), hasItems(Proposals.proposalThat(is("orange")))); - assertThat(completionProvider.complete("oran", 1), hasItems(Proposals.proposalThat(is("orange")))); - assertThat(completionProvider.complete("orange", 1), hasItems(Proposals.proposalThat(is("orange --expression=")), - Proposals.proposalThat(is("orange --fooble=")),Proposals.proposalThat(is("orange --expresso=")))); - assertThat(completionProvider.complete("o1: orange||", 1), hasItems(Proposals.proposalThat(is("o1: orange|| orange")))); - assertThat(completionProvider.complete("o1: orange|| ", 1), hasItems(Proposals.proposalThat(is("o1: orange|| orange")))); - assertThat(completionProvider.complete("o1: orange ||", 1), hasItems(Proposals.proposalThat(is("o1: orange || orange")))); - assertThat(completionProvider.complete("o1: orange|| or", 1), hasItems(Proposals.proposalThat(is("o1: orange|| orange")))); - assertThat(completionProvider.complete("http | o", 1), empty()); - assertThat(completionProvider.complete("http|| o", 1), hasItems(Proposals.proposalThat(is("http|| orange")))); + assertThat(completionProvider.complete("", 1)).haveAtLeastOne(Proposals.proposalThatIs("orange")); + assertThat(completionProvider.complete("o", 1)).have(Proposals.proposalThatIs("orange")); + assertThat(completionProvider.complete("oran", 1)).have(Proposals.proposalThatIs("orange")); + assertThat(completionProvider.complete("orange", 1)).has(Proposals.proposalThatHasAll("orange --expression=","orange --fooble=","orange --expresso=")); + assertThat(completionProvider.complete("o1: orange||", 1)).haveAtLeastOne(Proposals.proposalThatIs("o1: orange|| orange")); + assertThat(completionProvider.complete("o1: orange|| ", 
1)).have(Proposals.proposalThatIs("o1: orange|| orange")); + assertThat(completionProvider.complete("o1: orange ||", 1)).have(Proposals.proposalThatIs("o1: orange || orange")); + assertThat(completionProvider.complete("o1: orange|| or", 1)).have(Proposals.proposalThatIs("o1: orange|| orange")); + assertThat(completionProvider.complete("http | o", 1)).isEmpty(); + assertThat(completionProvider.complete("http|| o", 1)).have(Proposals.proposalThatIs("http|| orange")); } @Test // file | filter => file | filter | foo, etc public void testValidSubStreamDefinitionShouldReturnPipe() { - assertThat(completionProvider.complete("http | filter ", 1), hasItems(Proposals.proposalThat(is("http | filter | log")))); - assertThat(completionProvider.complete("http | filter ", 1), - not(hasItems(Proposals.proposalThat(is("http | filter | http"))))); + assertThat(completionProvider.complete("http | filter ", 1)).haveAtLeastOne(Proposals.proposalThatIs("http | filter | log")); + assertThat(completionProvider.complete("http | filter ", 1)).doNotHave(Proposals.proposalThatIs("http | filter | http")); } @Test // file | filter => file | filter --foo=, etc public void testValidSubStreamDefinitionShouldReturnAppOptions() { - assertThat(completionProvider.complete("http | filter ", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + assertThat(completionProvider.complete("http | filter ", 1)).has(Proposals.proposalThatHasAll("http | filter --expression=", "http | filter --expresso=")); // Same as above, no final space - assertThat(completionProvider.complete("http | filter", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + assertThat(completionProvider.complete("http | filter", 1)).has(Proposals.proposalThatHasAll("http | filter --expression=", "http | filter --expresso=")); } @Test // file | filter - => file | filter --foo,etc public void testOneDashShouldReturnTwoDashes() { - assertThat(completionProvider.complete("http | filter -", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + assertThat(completionProvider.complete("http | filter -", 1)).has(Proposals.proposalThatHasAll("http | filter --expression=", "http | filter --expresso=")); } @Test // file | filter -- => file | filter --foo,etc public void testTwoDashesShouldReturnOptions() { - assertThat(completionProvider.complete("http | filter --", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + assertThat(completionProvider.complete("http | filter --", 1)).has(Proposals.proposalThatHasAll("http | filter --expression=", "http | filter --expresso=")); } @Test // file | => file | foo,etc public void testDanglingPipeShouldReturnExtraApps() { - assertThat(completionProvider.complete("http |", 1), hasItems(Proposals.proposalThat(is("http | filter")))); - assertThat(completionProvider.complete("http | filter |", 1), - hasItems(Proposals.proposalThat(is("http | filter | log")), Proposals.proposalThat(is("http | filter | filter2: filter")))); + assertThat(completionProvider.complete("http |", 1)).haveAtLeastOne(Proposals.proposalThatIs("http | filter")); + assertThat(completionProvider.complete("http | filter |", 1)).has(Proposals.proposalThatHasAll("http | filter | log", "http | filter | filter2: filter")); } @Test // file --p => file 
--preventDuplicates=, file --pattern= public void testUnfinishedOptionNameShouldComplete() { - assertThat(completionProvider.complete("http --p", 1), hasItems(Proposals.proposalThat(is("http --port=")))); + assertThat(completionProvider.complete("http --p", 1)).have(Proposals.proposalThatIs("http --port=")); } @Test // file | counter --name=foo --inputType=bar => we're done public void testSinkWithAllOptionsSetCantGoFurther() { - assertThat(completionProvider.complete("http | log --port=1234 --level=debug", 1), empty()); + assertThat(completionProvider.complete("http | log --port=1234 --level=debug", 1)).isEmpty(); } @Test // file | counter --name= => nothing public void testInGenericOptionValueCantProposeAnything() { - assertThat(completionProvider.complete("http --port=", 1), empty()); + assertThat(completionProvider.complete("http --port=", 1)).isEmpty(); } @Test // :foo > ==> add app names public void testDestinationIntoApps() { - assertThat(completionProvider.complete(":foo >", 1), - hasItems(Proposals.proposalThat(is(":foo > filter")), Proposals.proposalThat(is(":foo > log")))); - assertThat(completionProvider.complete(":foo >", 1), not(hasItems(Proposals.proposalThat(is(":foo > http"))))); + assertThat(completionProvider.complete(":foo >", 1)).has(Proposals.proposalThatHasAll(":foo > filter", ":foo > log")); + assertThat(completionProvider.complete(":foo >", 1)).doNotHave(Proposals.proposalThatIs(":foo > http")); } @Test // :foo > ==> add app names public void testDestinationIntoAppsVariant() { - assertThat(completionProvider.complete(":foo >", 1), - hasItems(Proposals.proposalThat(is(":foo > filter")), Proposals.proposalThat(is(":foo > log")))); + assertThat(completionProvider.complete(":foo >", 1)).has(Proposals.proposalThatHasAll(":foo > filter", ":foo > log")); } @Test // http (no space) => NOT "http2: http" public void testAutomaticAppLabellingDoesNotGetInTheWay() { - assertThat(completionProvider.complete("http", 1), not(hasItems(Proposals.proposalThat(is("http2: http"))))); + assertThat(completionProvider.complete("http", 1)).doNotHave(Proposals.proposalThatIs("http2: http")); } @Test // http --use-ssl= => propose true|false public void testValueHintForBooleans() { - assertThat(completionProvider.complete("http --use-ssl=", 1), - hasItems(Proposals.proposalThat(is("http --use-ssl=true")), Proposals.proposalThat(is("http --use-ssl=false")))); + assertThat(completionProvider.complete("http --use-ssl=", 1)).has(Proposals.proposalThatHasAll("http --use-ssl=true", "http --use-ssl=false")); } @Test // .. 
foo --enum-value= => propose enum values public void testValueHintForEnums() { - assertThat(completionProvider.complete("http | filter --expresso=", 1), - hasItems(Proposals.proposalThat(is("http | filter --expresso=SINGLE")), - Proposals.proposalThat(is("http | filter --expresso=DOUBLE")))); + assertThat(completionProvider.complete("http | filter --expresso=", 1)).has(Proposals.proposalThatHasAll("http | filter --expresso=SINGLE","http | filter --expresso=DOUBLE")); } @Test public void testUnrecognizedPrefixesDontBlowUp() { - assertThat(completionProvider.complete("foo", 1), empty()); - assertThat(completionProvider.complete("foo --", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=prefix", 1), empty()); + assertThat(completionProvider.complete("foo", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=prefix", 1)).isEmpty(); assertThat( completionProvider.complete( - "http | filter --port=12 --expression=something " + "--expresso=not-a-valid-prefix", 1), - empty()); + "http | filter --port=12 --expression=something " + "--expresso=not-a-valid-prefix", 1) + ).isEmpty(); } /* @@ -198,8 +181,7 @@ public void testUnrecognizedPrefixesDontBlowUp() { */ @Test public void testClosedSetValuesShouldBeExclusive() { - assertThat(completionProvider.complete("http --use-ssl=tr", 1), - not(hasItems(Proposals.proposalThat(startsWith("http --use-ssl=tr --port"))))); + assertThat(completionProvider.complete("http --use-ssl=tr", 1)).doNotHave(Proposals.proposalThatStartsWith("http --use-ssl=tr --port")); } } diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java index 08db8a78c5..938b7cde20 100644 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java +++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java @@ -16,21 +16,15 @@ package org.springframework.cloud.dataflow.completion; -import org.junit.Test; -import org.junit.runner.RunWith; + +import org.assertj.core.api.Condition; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; -import org.springframework.test.context.junit4.SpringRunner; - -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static 
org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Integration tests for TaskCompletionProvider. *

@@ -43,9 +37,10 @@ * @author Eric Bottard * @author Mark Fisher * @author Andy Clement + * @author Corneil du Plessis */ @SuppressWarnings("unchecked") -@RunWith(SpringRunner.class) + @SpringBootTest(classes = { CompletionConfiguration.class, CompletionTestsMocks.class }, properties = { "spring.main.allow-bean-definition-overriding=true" }) public class TaskCompletionProviderTests { @@ -56,77 +51,71 @@ public class TaskCompletionProviderTests { @Test // => basic,plum,etc public void testEmptyStartShouldProposeSourceApps() { - assertThat(completionProvider.complete("", 1), hasItems(Proposals.proposalThat(is("basic")), Proposals.proposalThat(is("plum")))); - assertThat(completionProvider.complete("", 1), not(hasItems(Proposals.proposalThat(is("log"))))); + assertThat(completionProvider.complete("", 1)).has(Proposals.proposalThatHasAll("basic", "plum")); + assertThat(completionProvider.complete("", 1)).doNotHave(Proposals.proposalThatIs("log")); } @Test // b => basic public void testUnfinishedAppNameShouldReturnCompletions() { - assertThat(completionProvider.complete("b", 1), hasItems(Proposals.proposalThat(is("basic")))); - assertThat(completionProvider.complete("ba", 1), hasItems(Proposals.proposalThat(is("basic")))); - assertThat(completionProvider.complete("pl", 1), not(hasItems(Proposals.proposalThat(is("basic"))))); + assertThat(completionProvider.complete("b", 1)).have(Proposals.proposalThatIs("basic")); + assertThat(completionProvider.complete("ba", 1)).have(Proposals.proposalThatIs("basic")); + assertThat(completionProvider.complete("pl", 1)).doNotHave(Proposals.proposalThatIs("basic")); } @Test // basic => basic --foo=, etc public void testValidTaskDefinitionShouldReturnAppOptions() { - assertThat(completionProvider.complete("basic ", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + assertThat(completionProvider.complete("basic ", 1)).has(Proposals.proposalThatHasAny("basic --expression=", "basic --expresso=")); // Same as above, no final space - assertThat(completionProvider.complete("basic", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + assertThat(completionProvider.complete("basic", 1)).has(Proposals.proposalThatHasAll("basic --expression=", "basic --expresso=")); } @Test // file | filter - => file | filter --foo,etc public void testOneDashShouldReturnTwoDashes() { - assertThat(completionProvider.complete("basic -", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + assertThat(completionProvider.complete("basic -", 1)).has(Proposals.proposalThatHasAll("basic --expression=", "basic --expresso=")); } @Test // basic -- => basic --foo,etc public void testTwoDashesShouldReturnOptions() { - assertThat(completionProvider.complete("basic --", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + assertThat(completionProvider.complete("basic --", 1)).has(Proposals.proposalThatHasAll("basic --expression=", "basic --expresso=")); } @Test // file --p => file --preventDuplicates=, file --pattern= public void testUnfinishedOptionNameShouldComplete() { - assertThat(completionProvider.complete("basic --foo", 1), hasItems(Proposals.proposalThat(is("basic --fooble=")))); + assertThat(completionProvider.complete("basic --foo", 1)).have(Proposals.proposalThatIs("basic --fooble=")); } @Test // file | counter --name= => nothing 
public void testInGenericOptionValueCantProposeAnything() { - assertThat(completionProvider.complete("basic --expression=", 1), empty()); + assertThat(completionProvider.complete("basic --expression=", 1)).isEmpty(); } @Test // plum --use-ssl= => propose true|false public void testValueHintForBooleans() { - assertThat(completionProvider.complete("plum --use-ssl=", 1), - hasItems(Proposals.proposalThat(is("plum --use-ssl=true")), Proposals.proposalThat(is("plum --use-ssl=false")))); + assertThat(completionProvider.complete("plum --use-ssl=", 1)).has(Proposals.proposalThatHasAll("plum --use-ssl=true", "plum --use-ssl=false")); } @Test // basic --enum-value= => propose enum values public void testValueHintForEnums() { - assertThat(completionProvider.complete("basic --expresso=", 1), - hasItems(Proposals.proposalThat(is("basic --expresso=SINGLE")), Proposals.proposalThat(is("basic --expresso=DOUBLE")))); + assertThat(completionProvider.complete("basic --expresso=", 1)).has(Proposals.proposalThatHasAll("basic --expresso=SINGLE", "basic --expresso=DOUBLE")); } @Test public void testUnrecognizedPrefixesDontBlowUp() { - assertThat(completionProvider.complete("foo", 1), empty()); - assertThat(completionProvider.complete("foo --", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=prefix", 1), empty()); + assertThat(completionProvider.complete("foo", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=prefix", 1)).isEmpty(); } /* @@ -135,7 +124,7 @@ public void testUnrecognizedPrefixesDontBlowUp() { */ @Test public void testClosedSetValuesShouldBeExclusive() { - assertThat(completionProvider.complete("basic --expresso=s", 1), - not(hasItems(Proposals.proposalThat(startsWith("basic --expresso=s --fooble"))))); + assertThat(completionProvider + .complete("basic --expresso=s", 1)).doNotHave(Proposals.proposalThatStartsWith("basic --expresso=s --fooble")); } } diff --git a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/README.txt b/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/README.txt deleted file mode 100644 index 9040967d87..0000000000 --- a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/README.txt +++ /dev/null @@ -1 +0,0 @@ -The contents of these 2 boot uberjars has been created using the src/test/support maven project(s). 
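The converted assertions in the two completion test classes above lean on a small `Proposals` helper that adapts completion text into AssertJ `Condition`s; the helper itself is not part of this hunk. A minimal sketch of what it could look like, assuming `CompletionProposal` exposes its rendered text via `getText()` (the real accessor and layout may differ), and assuming it lives in the same package as the tests:

```java
import java.util.Arrays;
import java.util.List;

import org.assertj.core.api.Condition;

final class Proposals {

	// Matches one proposal whose rendered text is exactly the expected string;
	// pairs with have()/doNotHave(), which test every element of the result list.
	static Condition<CompletionProposal> proposalThatIs(String expected) {
		return new Condition<>(p -> expected.equals(p.getText()), "proposal that is '%s'", expected);
	}

	// Matches one proposal whose rendered text starts with the given prefix.
	static Condition<CompletionProposal> proposalThatStartsWith(String prefix) {
		return new Condition<>(p -> p.getText().startsWith(prefix), "proposal starting with '%s'", prefix);
	}

	// Matches the whole result list when every expected completion is present;
	// pairs with has(). proposalThatHasAny would swap allMatch for anyMatch.
	static Condition<List<? extends CompletionProposal>> proposalThatHasAll(String... expected) {
		return new Condition<>(proposals -> Arrays.stream(expected)
				.allMatch(e -> proposals.stream().anyMatch(p -> e.equals(p.getText()))),
				"proposals containing all of %s", Arrays.toString(expected));
	}
}
```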
diff --git a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot13-1.0.0.BUILD-SNAPSHOT.jar b/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot13-1.0.0.BUILD-SNAPSHOT.jar deleted file mode 100644 index b542bf21fd..0000000000 Binary files a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot13-1.0.0.BUILD-SNAPSHOT.jar and /dev/null differ diff --git a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot14-1.0.0.BUILD-SNAPSHOT.jar b/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot14-1.0.0.BUILD-SNAPSHOT.jar deleted file mode 100644 index b169b3cb39..0000000000 Binary files a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot14-1.0.0.BUILD-SNAPSHOT.jar and /dev/null differ diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/pom.xml b/spring-cloud-dataflow-completion/src/test/support/boot13/pom.xml deleted file mode 100644 index f2ac85f486..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/pom.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - 4.0.0 - com.acme - boot13 - 1.0.0.BUILD-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 1.3.0.RELEASE - - - - UTF-8 - - - - - org.springframework.boot - spring-boot-starter - 1.3.0.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - 1.3.0.RELEASE - true - - - com.acme - common - 1.0.0.BUILD-SNAPSHOT - - - - - - org.springframework.boot - spring-boot-maven-plugin - 1.3.0.RELEASE - - - - diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/resources/META-INF/dataflow-configuration-metadata.properties b/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/resources/META-INF/dataflow-configuration-metadata.properties deleted file mode 100644 index d38019ab9e..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/resources/META-INF/dataflow-configuration-metadata.properties +++ /dev/null @@ -1,3 +0,0 @@ -configuration-properties.classes=\ - com.acme.boot13.MyConfigProperties13, \ - com.acme.common.ConfigProperties diff --git a/spring-cloud-dataflow-completion/src/test/support/boot14/pom.xml b/spring-cloud-dataflow-completion/src/test/support/boot14/pom.xml deleted file mode 100644 index abb354872f..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot14/pom.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - 4.0.0 - com.acme - boot14 - 1.0.0.BUILD-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 1.4.0.RELEASE - - - - UTF-8 - - - - - org.springframework.boot - spring-boot-starter - 1.4.0.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - 1.4.0.RELEASE - true - - - com.acme - common - 1.0.0.BUILD-SNAPSHOT - - - - - - org.springframework.boot - spring-boot-maven-plugin - 1.4.0.RELEASE - - - - diff --git a/spring-cloud-dataflow-completion/src/test/support/boot14/src/main/resources/META-INF/dataflow-configuration-metadata.properties b/spring-cloud-dataflow-completion/src/test/support/boot14/src/main/resources/META-INF/dataflow-configuration-metadata.properties deleted file mode 100644 index 764fc7c1c5..0000000000 --- 
a/spring-cloud-dataflow-completion/src/test/support/boot14/src/main/resources/META-INF/dataflow-configuration-metadata.properties +++ /dev/null @@ -1,3 +0,0 @@ -configuration-properties.classes=\ - com.acme.boot14.MyConfigProperties14, \ - com.acme.common.ConfigProperties diff --git a/spring-cloud-dataflow-completion/src/test/support/common/pom.xml b/spring-cloud-dataflow-completion/src/test/support/common/pom.xml deleted file mode 100644 index 44ade52df7..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/common/pom.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - 4.0.0 - com.acme - common - 1.0.0.BUILD-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 1.3.0.RELEASE - - - - UTF-8 - - - - - org.springframework.boot - spring-boot-starter - 1.3.0.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - 1.3.0.RELEASE - true - - - diff --git a/spring-cloud-dataflow-completion/src/test/support/pom.xml b/spring-cloud-dataflow-completion/src/test/support/pom.xml deleted file mode 100644 index 583fd030b1..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/pom.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - 4.0.0 - com.acme - parent - 1.0.0.BUILD-SNAPSHOT - pom - - UTF-8 - - - - common - boot13 - boot14 - - diff --git a/spring-cloud-dataflow-composed-task-runner/README.adoc b/spring-cloud-dataflow-composed-task-runner/README.adoc index 74a5d5db84..144a11ea8c 100644 --- a/spring-cloud-dataflow-composed-task-runner/README.adoc +++ b/spring-cloud-dataflow-composed-task-runner/README.adoc @@ -123,6 +123,22 @@ definitions fails in the split then DDD and EEE will not fire. For example if BBB fails then AAA and CCC will be marked successful and BBB will be marked a failure and DDD and EEE will not be launched. +If any child task within a split returns an `ExitMessage` other than `COMPLETED`, the split +will have an `ExitStatus` of `FAILED`. To ignore the `ExitMessage` of a child task, +add `ignoreExitMessage=true` for each app within the split that will return an `ExitMessage`. +When using this flag, the `ExitStatus` of the task will be `COMPLETED` if the `ExitCode` of +the child task is zero, and the split will have an `ExitStatus` of `FAILED` if the +`ExitCode` is non-zero. There are two ways to set the `ignoreExitMessage` flag: + +1. Set the property for each of the apps whose exitMessage should be ignored +within the split. For example, in a split like `<AAA || BBB || CCC>` where `BBB` +will return an `exitMessage`, you would set the `ignoreExitMessage` property as +`app.BBB.ignoreExitMessage=true`. + +2. You can also set it for all apps using the composed-task-arguments property, +for example: `--composed-task-arguments=--ignoreExitMessage=true`. + == Configuration See the https://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/#spring-cloud-dataflow-composed-tasks[Configuring Composed Task Runner] in the Spring Cloud Data Flow reference guide.
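Concretely, reusing the `AAA`/`BBB`/`CCC` names from the example above, the two styles look as follows when supplied at launch time (both lines are drawn from the options this section describes):

```
# Option 1: ignore only BBB's exit message within the split
app.BBB.ignoreExitMessage=true

# Option 2: pass the flag to every app through the composed task arguments
--composed-task-arguments=--ignoreExitMessage=true
```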
diff --git a/spring-cloud-dataflow-composed-task-runner/pom.xml b/spring-cloud-dataflow-composed-task-runner/pom.xml index 4f52abe270..4ce19960c6 100644 --- a/spring-cloud-dataflow-composed-task-runner/pom.xml +++ b/spring-cloud-dataflow-composed-task-runner/pom.xml @@ -1,15 +1,26 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent + 2.11.6-SNAPSHOT + org.springframework.cloud spring-cloud-dataflow-composed-task-runner + spring-cloud-dataflow-composed-task-runner + Spring Cloud Data Flow Composed Task Runner + jar + 1.8 3.3.0 + true + 3.4.1 @@ -20,6 +31,7 @@ org.springframework.cloud spring-cloud-dataflow-rest-client + ${project.version} io.pivotal.cfenv @@ -32,6 +44,7 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud @@ -44,6 +57,7 @@ org.mariadb.jdbc mariadb-java-client + [3.1.2,) org.postgresql @@ -76,21 +90,10 @@ javax.xml.bind jaxb-api - - org.assertj - assertj-core - test - org.springframework.boot spring-boot-starter-test test - - - org.junit.vintage - junit-vintage-engine - - org.codehaus.plexus @@ -100,13 +103,21 @@ org.springframework.cloud - spring-cloud-starter-common-security-config-web + spring-cloud-common-security-config-web test + ${project.version} + + + io.micrometer + micrometer-registry-prometheus io.micrometer.prometheus prometheus-rsocket-spring - test + + + io.micrometer.prometheus + prometheus-rsocket-client @@ -114,6 +125,46 @@ org.springframework.boot spring-boot-maven-plugin + + + + repackage + + + + + + springcloud/${project.artifactId}:${project.version} + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java index 9a3a12a61a..f98b372ce9 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java @@ -16,31 +16,22 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; +import javax.sql.DataSource; import java.util.HashMap; import java.util.Map; -import javax.sql.DataSource; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; -import org.springframework.batch.support.DatabaseType; import org.springframework.boot.autoconfigure.batch.BasicBatchConfigurer; import org.springframework.boot.autoconfigure.batch.BatchProperties; import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import 
org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; -import org.springframework.cloud.dataflow.composedtaskrunner.support.SqlServerSequenceMaxValueIncrementer; -import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.util.StringUtils; /** * A BatchConfigurer for CTR that will establish the transaction isolation level to ISOLATION_REPEATABLE_READ by default. @@ -64,6 +55,7 @@ public class ComposedBatchConfigurer extends BasicBatchConfigurer { * @param dataSource the underlying data source * @param transactionManagerCustomizers transaction manager customizers (or * {@code null}) + * @param composedTaskProperties composed task properties */ protected ComposedBatchConfigurer(BatchProperties properties, DataSource dataSource, TransactionManagerCustomizers transactionManagerCustomizers, ComposedTaskProperties composedTaskProperties) { @@ -80,13 +72,7 @@ protected JobRepository createJobRepository() { @Override public JobRepository getJobRepository() { JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); - DefaultDataFieldMaxValueIncrementerFactory incrementerFactory = - new DefaultDataFieldMaxValueIncrementerFactory(this.incrementerDataSource) { - @Override - public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) { - return getIncrementerForApp(incrementerName); - } - }; + MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(this.incrementerDataSource); factory.setIncrementerFactory(incrementerFactory); factory.setDataSource(this.incrementerDataSource); factory.setTransactionManager(this.getTransactionManager()); @@ -99,71 +85,4 @@ public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, Strin throw new ComposedTaskException(exception.getMessage()); } } - - private DataFieldMaxValueIncrementer getIncrementerForApp(String incrementerName) { - - DefaultDataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(this.incrementerDataSource); - DataFieldMaxValueIncrementer incrementer = null; - if(incrementerMap.containsKey(incrementerName)) { - return incrementerMap.get(incrementerName); - } - if (this.incrementerDataSource != null) { - String databaseType; - try { - databaseType = DatabaseType.fromMetaData(this.incrementerDataSource).name(); - } - catch (MetaDataAccessException e) { - throw new IllegalStateException(e); - } - if (StringUtils.hasText(databaseType) && databaseType.equals("SQLSERVER")) { - if (!isSqlServerTableSequenceAvailable(incrementerName)) { - incrementer = new SqlServerSequenceMaxValueIncrementer(this.incrementerDataSource, incrementerName); - incrementerMap.put(incrementerName, incrementer); - } - } - } - if (incrementer == null) { - try { - incrementer = incrementerFactory.getIncrementer(DatabaseType.fromMetaData(this.incrementerDataSource).name(), incrementerName); - incrementerMap.put(incrementerName, incrementer); - } - catch (Exception exception) { - logger.warn(exception.getMessage(), exception); - } - } - return incrementer; - } - - private boolean isSqlServerTableSequenceAvailable(String incrementerName) { - boolean result = false; - DatabaseMetaData metaData; - Connection connection = null; - try { - connection = this.incrementerDataSource.getConnection(); - metaData = 
connection.getMetaData(); - String[] types = {"TABLE"}; - ResultSet tables = metaData.getTables(null, null, "%", types); - while (tables.next()) { - if (tables.getString("TABLE_NAME").equals(incrementerName)) { - result = true; - break; - } - } - } - catch (SQLException sqe) { - logger.warn(sqe.getMessage(), sqe); - } - finally { - if(connection != null) { - try { - connection.close(); - } - catch (SQLException sqe) { - logger.warn(sqe.getMessage(), sqe); - } - } - } - return result; - } - } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java index 950f8e2490..4aa96b7fc7 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,15 @@ package org.springframework.cloud.dataflow.composedtaskrunner; import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepExecutionListener; import org.springframework.batch.core.configuration.annotation.BatchConfigurer; @@ -25,18 +34,28 @@ import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; +import org.springframework.cloud.dataflow.core.RelaxedNames; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; +import org.springframework.cloud.dataflow.core.dsl.TaskParser; +import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; import org.springframework.cloud.task.configuration.EnableTask; +import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; +import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; +import org.springframework.core.env.Environment; import org.springframework.core.task.TaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.util.StringUtils; /** * Configures the Job that will execute the Composed Task Execution. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ @EnableBatchProcessing @EnableTask @@ -44,16 +63,78 @@ @Configuration @Import(org.springframework.cloud.dataflow.composedtaskrunner.StepBeanDefinitionRegistrar.class) public class ComposedTaskRunnerConfiguration { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerConfiguration.class); + + @Bean + public TaskExecutionListener taskExecutionListener() { + return new ComposedTaskRunnerTaskListener(); + } @Bean - public StepExecutionListener composedTaskStepExecutionListener(TaskExplorer taskExplorer){ - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener(taskExplorer); + public StepExecutionListener composedTaskStepExecutionListener(TaskExplorerContainer taskExplorerContainer) { + return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener(taskExplorerContainer); } @Bean - public org.springframework.cloud.dataflow.composedtaskrunner.ComposedRunnerJobFactory composedTaskJob(ComposedTaskProperties properties) { + TaskExplorerContainer taskExplorerContainer(TaskExplorer taskExplorer, DataSource dataSource, ComposedTaskProperties properties, Environment env) { + Map explorers = new HashMap<>(); + String ctrName = env.getProperty("spring.cloud.task.name"); + if (!StringUtils.hasText(ctrName)) { + throw new IllegalStateException("spring.cloud.task.name property must have a value."); + } + TaskParser parser = new TaskParser("ctr", properties.getGraph(), false, true); + StepBeanDefinitionRegistrar.TaskAppsMapCollector collector = new StepBeanDefinitionRegistrar.TaskAppsMapCollector(); + parser.parse().accept(collector); + Set taskNames = collector.getTaskApps().keySet(); + logger.debug("taskExplorerContainer:taskNames:{}", taskNames); + for (String taskName : taskNames) { + addTaskExplorer(dataSource, properties, env, explorers, taskName); + String appName = taskName.replace(ctrName + "-", ""); + addTaskExplorer(dataSource, properties, env, explorers, appName); + if(taskName.length() > ctrName.length()) { + String shortTaskName = taskName.substring(ctrName.length() + 1); + addTaskExplorer(dataSource, properties, env, explorers, shortTaskName); + } + } + return new TaskExplorerContainer(explorers, taskExplorer); + } + + private static void addTaskExplorer( + DataSource dataSource, + ComposedTaskProperties properties, + Environment env, + Map explorers, + String taskName + ) { + logger.debug("addTaskExplorer:{}", taskName); + List propertyNames = new ArrayList<>(); + RelaxedNames relaxedNames = RelaxedNames.forCamelCase("tablePrefix"); + relaxedNames.forEach(tablePrefix -> propertyNames.add( + String.format("app.%s.spring.cloud.task.%s", taskName, tablePrefix))); + Map taskDeploymentProperties = + DeploymentPropertiesUtils.parse(properties.getComposedTaskProperties()); + String prefix = propertyNames.stream() + .map(propertyName -> { + String prefixOfComposedTaskProperties = taskDeploymentProperties.get(propertyName); + if(prefixOfComposedTaskProperties == null) { + prefixOfComposedTaskProperties = properties.getComposedTaskAppProperties().get(propertyName); + } + return prefixOfComposedTaskProperties == null ? 
env.getProperty(propertyName) : prefixOfComposedTaskProperties; + }) + .filter(Objects::nonNull) + .findFirst().orElse(null); + if (prefix != null) { + TaskExecutionDaoFactoryBean factoryBean = new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, prefix); + logger.info("taskExplorerContainer:adding:{}:{}", taskName, prefix); + explorers.put(taskName, new SimpleTaskExplorer(factoryBean)); + } else { + logger.warn("Cannot find {} in {} ", propertyNames, properties.getComposedTaskAppProperties()); + } + } - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedRunnerJobFactory(properties); + @Bean + public ComposedRunnerJobFactory composedTaskJob(ComposedTaskProperties properties) { + return new ComposedRunnerJobFactory(properties); } @Bean @@ -71,10 +152,17 @@ public TaskExecutor taskExecutor(ComposedTaskProperties properties) { } @Bean - public BatchConfigurer getComposedBatchConfigurer(BatchProperties properties, - DataSource dataSource, TransactionManagerCustomizers transactionManagerCustomizers, - ComposedTaskProperties composedTaskProperties) { - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedBatchConfigurer(properties, - dataSource, transactionManagerCustomizers, composedTaskProperties); + public BatchConfigurer getComposedBatchConfigurer( + BatchProperties properties, + DataSource dataSource, + TransactionManagerCustomizers transactionManagerCustomizers, + ComposedTaskProperties composedTaskProperties + ) { + return new ComposedBatchConfigurer( + properties, + dataSource, + transactionManagerCustomizers, + composedTaskProperties + ); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java index 91662f08f7..789cc83ed4 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java @@ -23,6 +23,9 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,8 +36,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.core.Base64Utils; -import org.springframework.cloud.task.configuration.TaskConfigurer; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.cloud.task.configuration.TaskProperties; +import org.springframework.core.env.Environment; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; @@ -49,19 +54,20 @@ * * @author Glenn Renfro * @author Michael Minella + * @author Corneil du Plessis */ public 
class ComposedTaskRunnerStepFactory implements FactoryBean { - private final static Logger log = LoggerFactory.getLogger(ComposedTaskRunnerStepFactory.class); + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerStepFactory.class); @Autowired private ComposedTaskProperties composedTaskProperties; private ComposedTaskProperties composedTaskPropertiesFromEnv; - private String taskName; + private final String taskName; - private String taskNameId; + private final String taskNameId; private Map taskSpecificProps = new HashMap<>(); @@ -74,7 +80,7 @@ public class ComposedTaskRunnerStepFactory implements FactoryBean { private StepExecutionListener composedTaskStepExecutionListener; @Autowired - private TaskConfigurer taskConfigurer; + private TaskExplorerContainer taskExplorerContainer; @Autowired private TaskProperties taskProperties; @@ -85,8 +91,15 @@ public class ComposedTaskRunnerStepFactory implements FactoryBean { @Autowired(required = false) private OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; + @Autowired(required = false) + private ObjectMapper mapper; + + @Autowired + private Environment environment; + public ComposedTaskRunnerStepFactory( - ComposedTaskProperties composedTaskPropertiesFromEnv, String taskName, String taskNameId) { + ComposedTaskProperties composedTaskPropertiesFromEnv, String taskName, String taskNameId + ) { Assert.notNull(composedTaskPropertiesFromEnv, "composedTaskProperties must not be null"); Assert.hasText(taskName, "taskName must not be empty nor null"); @@ -97,53 +110,66 @@ public ComposedTaskRunnerStepFactory( } public void setTaskSpecificProps(Map taskSpecificProps) { - if(taskSpecificProps != null) { + if (taskSpecificProps != null) { this.taskSpecificProps = taskSpecificProps; } } public void setArguments(List arguments) { - if(arguments != null) { + if (arguments != null) { this.arguments = arguments; } } @Override - public Step getObject() throws Exception { - + public Step getObject() { + if (this.mapper == null) { + this.mapper = new ObjectMapper(); + this.mapper.registerModule(new Jdk8Module()); + this.mapper.registerModule(new Jackson2HalModule()); + this.mapper.registerModule(new JavaTimeModule()); + this.mapper.registerModule(new Jackson2DataflowModule()); + } TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet( - this.clientRegistrations, this.clientCredentialsTokenResponseClient, taskConfigurer.getTaskExplorer(), - this.composedTaskPropertiesFromEnv, this.taskName, taskProperties); - - List argumentsFromAppProperties = Base64Utils - .decodeMap(this.composedTaskProperties.getComposedTaskAppArguments()).entrySet().stream() - .filter(e -> e.getKey().startsWith("app." + taskNameId) || e.getKey().startsWith("app.*.")) - .map(e -> e.getValue()) - .collect(Collectors.toList()); + this.clientRegistrations, + this.clientCredentialsTokenResponseClient, + this.taskExplorerContainer.get(this.taskNameId), + this.composedTaskPropertiesFromEnv, + this.taskName, + taskProperties, + environment, this.mapper); + + List argumentsFromAppProperties = Base64Utils.decodeMap(this.composedTaskProperties.getComposedTaskAppArguments()) + .entrySet() + .stream() + .filter(e -> e.getKey().startsWith("app." 
+ taskNameId + ".") || e.getKey().startsWith("app.*.")) + .map(Map.Entry::getValue) + .collect(Collectors.toList()); List argumentsToUse = Stream.concat(this.arguments.stream(), argumentsFromAppProperties.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toList()); taskLauncherTasklet.setArguments(argumentsToUse); - log.debug("decoded composed-task-app-properties {}", composedTaskProperties.getComposedTaskAppProperties()); + logger.debug("decoded composed-task-app-properties {}", composedTaskProperties.getComposedTaskAppProperties()); Map propertiesFrom = Base64Utils .decodeMap(this.composedTaskProperties.getComposedTaskAppProperties()).entrySet().stream() - .filter(e -> e.getKey().startsWith("app." + taskNameId) - || e.getKey().startsWith("deployer." + taskNameId) || e.getKey().startsWith("deployer.*")) - .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue())); + .filter(e -> + e.getKey().startsWith("app." + taskNameId + ".") || + e.getKey().startsWith("app.*.") || + e.getKey().startsWith("deployer." + taskNameId + ".") || + e.getKey().startsWith("deployer.*.")) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); Map propertiesToUse = new HashMap<>(); propertiesToUse.putAll(this.taskSpecificProps); propertiesToUse.putAll(propertiesFrom); taskLauncherTasklet.setProperties(propertiesToUse); - log.debug("Properties to use {}", propertiesToUse); - - String stepName = this.taskName; + logger.debug("Properties to use {}", propertiesToUse); - return this.steps.get(stepName) + return this.steps.get(this.taskName) .tasklet(taskLauncherTasklet) .transactionAttribute(getTransactionAttribute()) .listener(this.composedTaskStepExecutionListener) @@ -156,7 +182,7 @@ public Step getObject() throws Exception { * what is in its transaction. By setting isolation to READ_COMMITTED * The task launcher can see latest state of the db. Since the changes * to the task execution are done by the tasks. - + * * @return DefaultTransactionAttribute with isolation set to READ_COMMITTED. 
*/ private TransactionAttribute getTransactionAttribute() { @@ -172,8 +198,4 @@ public Class getObjectType() { return Step.class; } - @Override - public boolean isSingleton() { - return true; - } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java new file mode 100644 index 0000000000..c9f487df05 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java @@ -0,0 +1,23 @@ +package org.springframework.cloud.dataflow.composedtaskrunner; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.task.listener.TaskExecutionListenerSupport; +import org.springframework.cloud.task.repository.TaskExecution; + +public class ComposedTaskRunnerTaskListener extends TaskExecutionListenerSupport { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerTaskListener.class); + + private static Long executionId = null; + + @Override + public void onTaskStartup(TaskExecution taskExecution) { + executionId = taskExecution.getExecutionId(); + logger.info("onTaskStartup:executionId={}", executionId); + } + + public static Long getExecutionId() { + return executionId; + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java index 50dee971ff..d494195569 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,8 +16,8 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; @@ -33,52 +33,68 @@ * exit code. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ public class ComposedTaskStepExecutionListener extends StepExecutionListenerSupport { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskStepExecutionListener.class); - private TaskExplorer taskExplorer; + private final TaskExplorerContainer taskExplorerContainer; - private static final Log logger = LogFactory.getLog(org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener.class); - - public ComposedTaskStepExecutionListener(TaskExplorer taskExplorer) { - Assert.notNull(taskExplorer, "taskExplorer must not be null."); - this.taskExplorer = taskExplorer; + public ComposedTaskStepExecutionListener(TaskExplorerContainer taskExplorerContainer) { + Assert.notNull(taskExplorerContainer, "taskExplorerContainer must not be null."); + this.taskExplorerContainer = taskExplorerContainer; + logger.info("ComposedTaskStepExecutionListener supporting {}", taskExplorerContainer.getKeys()); } /** * If endTime for task is null then the ExitStatus will be set to UNKNOWN. * If an exitMessage is returned by the TaskExecution then the exit status - * returned will be the ExitMessage. If no exitMessage is set for the task execution and the - * task returns an exitCode ! = to zero an exit status of FAILED is - * returned. If no exit message is set and the exit code of the task is - * zero then the ExitStatus of COMPLETED is returned. + * returned will be the ExitMessage. If no exitMessage is set for the task execution, or + * {@link TaskLauncherTasklet#IGNORE_EXIT_MESSAGE_PROPERTY} is set to true as a task property, + * and the task returns a non-zero exitCode, an exit status of FAILED is + * returned. If no exit message is set, or + * {@link TaskLauncherTasklet#IGNORE_EXIT_MESSAGE_PROPERTY} is set to true as a task property, + * and the exit code of the task is zero, then an ExitStatus of COMPLETED is returned. + * * @param stepExecution The stepExecution that kicked off the Task. * @return ExitStatus of COMPLETED else FAILED. */ @Override public ExitStatus afterStep(StepExecution stepExecution) { + logger.info("AfterStep processing for stepExecution {}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId()); ExitStatus result = ExitStatus.COMPLETED; - logger.info(String.format("AfterStep processing for stepExecution %s", - stepExecution.getStepName())); - Long executionId = (Long) stepExecution.getExecutionContext().get("task-execution-id"); - Assert.notNull(executionId, "TaskLauncherTasklet did not " + - "return a task-execution-id. Check to see if task " + - "exists."); - - TaskExecution resultExecution = this.taskExplorer.getTaskExecution(executionId); - - if (!StringUtils.isEmpty(resultExecution.getExitMessage())) { - result = new ExitStatus(resultExecution.getExitMessage()); + Assert.notNull(executionId, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a task-execution-id. Check to see if task exists."); + String schemaTarget = stepExecution.getExecutionContext().getString("schema-target"); + String taskName = stepExecution.getExecutionContext().getString("task-name"); + Assert.notNull(taskName, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a task-name.
Check to see if task exists."); + String explorerName = taskName; + if (!this.taskExplorerContainer.getKeys().contains(taskName)) { + Assert.notNull(schemaTarget, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a schema-target. Check to see if task exists."); + explorerName = schemaTarget; } - else if (resultExecution.getExitCode() != 0) { + logger.info("AfterStep for {}:{}:{}:{}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId(), taskName, executionId, schemaTarget); + TaskExplorer taskExplorer = this.taskExplorerContainer.get(explorerName); + TaskExecution resultExecution = taskExplorer.getTaskExecution(executionId); + if (!stepExecution.getExecutionContext().containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE) && + StringUtils.hasText(resultExecution.getExitMessage())) { + result = new ExitStatus(resultExecution.getExitMessage()); + } else if (resultExecution.getExitCode() != 0) { result = ExitStatus.FAILED; } - - logger.info(String.format("AfterStep processing complete for " + - "stepExecution %s with taskExecution %s", - stepExecution.getStepName(), executionId)); + logger.info("AfterStep processing complete for stepExecution {} with taskExecution {}:{}:{}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId(), taskName, executionId, schemaTarget); return result; } + @Override + public void beforeStep(StepExecution stepExecution) { + logger.info("beforeStep:{}:{}>>>>", stepExecution.getStepName(), stepExecution.getJobExecutionId()); + super.beforeStep(stepExecution); + logger.debug("beforeStep:{}", stepExecution.getExecutionContext()); + logger.info("beforeStep:{}:{}<<<", stepExecution.getStepName(), stepExecution.getJobExecutionId()); + + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java index 4d69835394..c6ded5e05c 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java @@ -48,6 +48,7 @@ public InMemoryClientRegistrationRepository clientRegistrationRepository( .clientId(properties.getOauth2ClientCredentialsClientId()) .clientSecret(properties.getOauth2ClientCredentialsClientSecret()) .scope(properties.getOauth2ClientCredentialsScopes()) + .clientAuthenticationMethod(properties.getOauth2ClientCredentialsClientAuthenticationMethod()) .build(); return new InMemoryClientRegistrationRepository(clientRegistration); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java index 2a3d749b43..fd19bef8d6 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java @@ -38,6 +38,7 @@ import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; import 
org.springframework.core.env.Environment; import org.springframework.core.type.AnnotationMetadata; +import org.springframework.security.oauth2.core.ClientAuthenticationMethod; import org.springframework.util.StringUtils; /** @@ -207,6 +208,10 @@ private ComposedTaskProperties composedTaskProperties() { if (dataFlowUriString != null) { properties.setDataflowServerUri(URI.create(dataFlowUriString)); } + String maxStartWaitTime = getPropertyValue("max-start-wait-time"); + if (maxStartWaitTime != null) { + properties.setMaxStartWaitTime(Integer.parseInt(maxStartWaitTime)); + } String maxWaitTime = getPropertyValue("max-wait-time"); if (maxWaitTime != null) { properties.setMaxWaitTime(Integer.parseInt(maxWaitTime)); @@ -224,6 +229,12 @@ private ComposedTaskProperties composedTaskProperties() { properties.setDataflowServerUsername(getPropertyValue("dataflow-server-username")); properties.setOauth2ClientCredentialsClientId(getPropertyValue("oauth2-client-credentials-client-id")); properties.setOauth2ClientCredentialsClientSecret(getPropertyValue("oauth2-client-credential-client-secret")); + + String oauth2ClientCredentialsClientAuthenticationMethodAsString = getPropertyValue("oauth2-client-credential-client-authentication-method"); + if (oauth2ClientCredentialsClientAuthenticationMethodAsString != null) { + properties.setOauth2ClientCredentialsClientAuthenticationMethod(new ClientAuthenticationMethod(oauth2ClientCredentialsClientAuthenticationMethodAsString)); + } + properties.setOauth2ClientCredentialsScopes(StringUtils.commaDelimitedListToSet(getPropertyValue("oauth2-client-credentials-scopes"))); return properties; } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java new file mode 100644 index 0000000000..4cd95b1727 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.composedtaskrunner; + +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.task.repository.TaskExplorer; + +/** + * A container for the TaskExplorers for each Task by name. 
+ * @author Corneil du Plessis + */ +public class TaskExplorerContainer { + private final static Logger logger = LoggerFactory.getLogger(TaskExplorerContainer.class); + + private final Map<String, TaskExplorer> taskExplorers; + + private final TaskExplorer defaultTaskExplorer; + + public TaskExplorerContainer(Map<String, TaskExplorer> taskExplorers, TaskExplorer defaultTaskExplorer) { + this.taskExplorers = taskExplorers; + this.defaultTaskExplorer = defaultTaskExplorer; + + } + + public TaskExplorer get(String name) { + TaskExplorer result = taskExplorers.get(name); + if (result == null) { + result = taskExplorers.get(SchemaVersionTarget.defaultTarget().getName()); + } + if(result == null) { + logger.warn("Cannot find TaskExplorer for {}. Using default", name); + result = defaultTaskExplorer; + } + return result; + } + public Set<String> getKeys() { + return taskExplorers.keySet(); + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java index 4cc2517e4e..7a8696b511 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,26 +21,38 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.UnexpectedJobExecutionException; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.item.ExecutionContext; import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.boot.context.properties.bind.Bindable; +import org.springframework.boot.context.properties.bind.Binder; +import org.springframework.boot.context.properties.source.MapConfigurationPropertySource; import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; import org.springframework.cloud.dataflow.composedtaskrunner.support.TaskExecutionTimeoutException; +import org.springframework.cloud.dataflow.composedtaskrunner.support.UnexpectedTaskExecutionException; import org.springframework.cloud.dataflow.rest.client.DataFlowOperations; import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; +import
org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.cloud.dataflow.rest.util.HttpClientConfigurer; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.core.env.Environment; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; +import org.springframework.lang.Nullable; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; import org.springframework.security.oauth2.client.registration.ClientRegistration; @@ -53,44 +65,64 @@ /** * Executes task launch request using Spring Cloud Data Flow's Restful API * then returns the execution id once the task launched. - * + *

* Note: This class is not thread-safe and as such should not be used as a singleton. * * @author Glenn Renfro */ public class TaskLauncherTasklet implements Tasklet { + final static String IGNORE_EXIT_MESSAGE = "IGNORE_EXIT_MESSAGE"; + + final static String IGNORE_EXIT_MESSAGE_PROPERTY = "ignore-exit-message"; - private ComposedTaskProperties composedTaskProperties; + private final ComposedTaskProperties composedTaskProperties; - private TaskExplorer taskExplorer; + private final TaskExplorer taskExplorer; private Map properties; private List arguments; - private String taskName; + private final String taskName; - private static final Log logger = LogFactory.getLog(org.springframework.cloud.dataflow.composedtaskrunner.TaskLauncherTasklet.class); + private static final Logger logger = LoggerFactory.getLogger(TaskLauncherTasklet.class); private Long executionId; + private final String ctrSchemaTarget; + + private long startTimeout; + private long timeout; - private ClientRegistrationRepository clientRegistrations; + private final ClientRegistrationRepository clientRegistrations; - private OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; + private final OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; private TaskOperations taskOperations; TaskProperties taskProperties; + private final ObjectMapper mapper; public TaskLauncherTasklet( ClientRegistrationRepository clientRegistrations, OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient, TaskExplorer taskExplorer, - ComposedTaskProperties composedTaskProperties, String taskName, - TaskProperties taskProperties) { + ComposedTaskProperties composedTaskProperties, + String taskName, + TaskProperties taskProperties, + Environment environment, + @Nullable ObjectMapper mapper + ) { + if (mapper == null) { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); + } + this.mapper = mapper; Assert.hasText(taskName, "taskName must not be empty nor null."); Assert.notNull(taskExplorer, "taskExplorer must not be null."); Assert.notNull(composedTaskProperties, @@ -102,22 +134,21 @@ public TaskLauncherTasklet( this.taskProperties = taskProperties; this.clientRegistrations = clientRegistrations; this.clientCredentialsTokenResponseClient = clientCredentialsTokenResponseClient; + this.ctrSchemaTarget = environment.getProperty("spring.cloud.task.schemaTarget"); } public void setProperties(Map properties) { - if(properties != null) { + if (properties != null) { this.properties = properties; - } - else { + } else { this.properties = new HashMap<>(0); } } public void setArguments(List arguments) { - if(arguments != null) { + if (arguments != null) { this.arguments = arguments; - } - else { + } else { this.arguments = new ArrayList<>(0); } } @@ -131,12 +162,15 @@ public void setArguments(List arguments) { * @return Repeat status of FINISHED. 
*/ @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) { + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) { TaskOperations taskOperations = taskOperations(); if (this.executionId == null) { + this.startTimeout = System.currentTimeMillis() + + this.composedTaskProperties.getMaxStartWaitTime(); this.timeout = System.currentTimeMillis() + this.composedTaskProperties.getMaxWaitTime(); + logger.debug("Wait time for this task to start is " + + this.composedTaskProperties.getMaxStartWaitTime()); logger.debug("Wait time for this task to complete is " + this.composedTaskProperties.getMaxWaitTime()); logger.debug("Interval check time for this task to complete is " + @@ -153,48 +187,75 @@ public RepeatStatus execute(StepContribution contribution, args = (List) stepExecutionContext.get("task-arguments"); } List cleansedArgs = new ArrayList<>(); - if(args != null) { - for(String argument : args) { - if(!argument.startsWith("--spring.cloud.task.parent-execution-id=")) { + if (args != null) { + for (String argument : args) { + if (!argument.startsWith("--spring.cloud.task.parent-execution-id=") && !argument.startsWith("--spring.cloud.task.parent-execution-id%")) { cleansedArgs.add(argument); + } else { + logger.debug("cleanse:removing argument:{}", argument); } } args = cleansedArgs; } - if(this.taskProperties.getExecutionid() != null) { - args.add("--spring.cloud.task.parent-execution-id=" + this.taskProperties.getExecutionid()); + if (args == null) { + args = new ArrayList<>(); } - if(StringUtils.hasText(this.composedTaskProperties.getPlatformName())) { + Long parentTaskExecutionId = getParentTaskExecutionId(contribution); + if (parentTaskExecutionId != null) { + args.add("--spring.cloud.task.parent-execution-id=" + parentTaskExecutionId); + String parentSchemaTarget = StringUtils.hasText(ctrSchemaTarget) ? 
ctrSchemaTarget : SchemaVersionTarget.defaultTarget().getName(); + args.add("--spring.cloud.task.parent-schema-target=" + parentSchemaTarget); + + } else { + logger.error("Cannot find task execution id"); + } + + if (StringUtils.hasText(this.composedTaskProperties.getPlatformName())) { properties.put("spring.cloud.dataflow.task.platformName", this.composedTaskProperties.getPlatformName()); } - this.executionId = taskOperations.launch(tmpTaskName, - this.properties, args); + logger.debug("execute:{}:{}:{}", tmpTaskName, this.properties, args); + LaunchResponseResource response = taskOperations.launch(tmpTaskName, this.properties, args); - stepExecutionContext.put("task-execution-id", executionId); - stepExecutionContext.put("task-arguments", args); - } - else { + this.executionId = response.getExecutionId(); + + stepExecutionContext.put("task-execution-id", response.getExecutionId()); + stepExecutionContext.put("schema-target", response.getSchemaTarget()); + + stepExecutionContext.put("task-name", tmpTaskName); + if (!args.isEmpty()) { + stepExecutionContext.put("task-arguments", args); + } + Boolean ignoreExitMessage = isIgnoreExitMessage(args, this.properties); + if (ignoreExitMessage != null) { + stepExecutionContext.put(IGNORE_EXIT_MESSAGE, ignoreExitMessage); + } + } else { try { Thread.sleep(this.composedTaskProperties.getIntervalTimeBetweenChecks()); - } - catch (InterruptedException e) { + } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IllegalStateException(e.getMessage(), e); } - TaskExecution taskExecution = - this.taskExplorer.getTaskExecution(this.executionId); + TaskExecution taskExecution = this.taskExplorer.getTaskExecution(this.executionId); if (taskExecution != null && taskExecution.getEndTime() != null) { if (taskExecution.getExitCode() == null) { - throw new UnexpectedJobExecutionException("Task returned a null exit code."); + throw new UnexpectedTaskExecutionException("Task returned a null exit code.", taskExecution); } else if (taskExecution.getExitCode() != 0) { - throw new UnexpectedJobExecutionException("Task returned a non zero exit code."); + throw new UnexpectedTaskExecutionException("Task returned a non zero exit code.", taskExecution); } else { return RepeatStatus.FINISHED; } } + if (this.composedTaskProperties.getMaxStartWaitTime() > 0 && + (taskExecution == null || taskExecution.getStartTime() == null) && + System.currentTimeMillis() > startTimeout) { + throw new TaskExecutionTimeoutException(String.format( + "Timeout occurred during startup of task with Execution Id %s", + this.executionId)); + } if (this.composedTaskProperties.getMaxWaitTime() > 0 && System.currentTimeMillis() > timeout) { throw new TaskExecutionTimeoutException(String.format( @@ -205,8 +266,22 @@ else if (taskExecution.getExitCode() != 0) { return RepeatStatus.CONTINUABLE; } + public Long getParentTaskExecutionId(StepContribution stepContribution) { + Long result = null; + if (this.taskProperties.getExecutionid() != null) { + result = this.taskProperties.getExecutionid(); + logger.debug("getParentTaskExecutionId:taskProperties.executionId={}", result); + } else if (ComposedTaskRunnerTaskListener.getExecutionId() != null) { + result = ComposedTaskRunnerTaskListener.getExecutionId(); + logger.debug("getParentTaskExecutionId:ComposedTaskRunnerTaskListener.executionId={}", result); + } else if (stepContribution != null) { + result = this.taskExplorer.getTaskExecutionIdByJobExecutionId(stepContribution.getStepExecution().getJobExecutionId()); + } + return 
result; + } + public TaskOperations taskOperations() { - if(this.taskOperations == null) { + if (this.taskOperations == null) { this.taskOperations = dataFlowOperations().taskOperations(); if (taskOperations == null) { throw new ComposedTaskException("Unable to connect to Data Flow " + @@ -242,18 +317,15 @@ protected DataFlowOperations dataFlowOperations() { final OAuth2AccessTokenResponse res = this.clientCredentialsTokenResponseClient.getTokenResponse(grantRequest); accessTokenValue = res.getAccessToken().getTokenValue(); logger.debug("Configured OAuth2 Client Credentials for accessing the Data Flow Server"); - } - else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerAccessToken())) { + } else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerAccessToken())) { accessTokenValue = this.composedTaskProperties.getDataflowServerAccessToken(); logger.debug("Configured OAuth2 Access Token for accessing the Data Flow Server"); - } - else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUsername()) + } else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUsername()) && StringUtils.hasText(this.composedTaskProperties.getDataflowServerPassword())) { - accessTokenValue = null; - clientHttpRequestFactoryBuilder.basicAuthCredentials(composedTaskProperties.getDataflowServerUsername(), composedTaskProperties.getDataflowServerPassword()); + clientHttpRequestFactoryBuilder.basicAuthCredentials(composedTaskProperties.getDataflowServerUsername(), + composedTaskProperties.getDataflowServerPassword()); logger.debug("Configured basic security for accessing the Data Flow Server"); - } - else { + } else { logger.debug("Not configuring basic security for accessing the Data Flow Server"); } @@ -272,7 +344,7 @@ else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUserna restTemplate.setRequestFactory(clientHttpRequestFactoryBuilder.buildClientHttpRequestFactory()); } - return new DataFlowTemplate(this.composedTaskProperties.getDataflowServerUri(), restTemplate); + return new DataFlowTemplate(this.composedTaskProperties.getDataflowServerUri(), restTemplate, mapper); } private void validateUsernamePassword(String userName, String password) { @@ -283,4 +355,42 @@ private void validateUsernamePassword(String userName, String password) { throw new IllegalArgumentException("A username may be specified only together with a password"); } } + + private Boolean isIgnoreExitMessage(List<String> args, Map<String, String> properties) { + Boolean result = null; + + if (properties != null) { + MapConfigurationPropertySource mapConfigurationPropertySource = new MapConfigurationPropertySource(); + properties.forEach((key, value) -> { + key = key.substring(key.lastIndexOf(".") + 1); + mapConfigurationPropertySource.put(key, value); + }); + result = isIgnoreMessagePresent(mapConfigurationPropertySource); + } + + if (args != null) { + MapConfigurationPropertySource mapConfigurationPropertySource = new MapConfigurationPropertySource(); + for (String arg : args) { + int firstEquals = arg.indexOf('='); + if (firstEquals != -1) { + mapConfigurationPropertySource.put(arg.substring(0, firstEquals), arg.substring(firstEquals + 1).trim()); + } + } + Boolean argResult = isIgnoreMessagePresent(mapConfigurationPropertySource); + if (argResult != null) { + result = argResult; + } + } + return result; + } + + private Boolean isIgnoreMessagePresent(MapConfigurationPropertySource mapConfigurationPropertySource) { + Binder binder = new Binder(mapConfigurationPropertySource);
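+ // (Editor's note, not part of the patch:) relaxed binding lets
+ // "ignore-exit-message" also match ignoreExitMessage, IGNORE_EXIT_MESSAGE
+ // and similar variants, and bind(...).get() throws when no value is
+ // present, which is why the broad catch below simply returns null.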
try { + return binder.bind(IGNORE_EXIT_MESSAGE_PROPERTY, Bindable.of(Boolean.class)).get(); + } catch (Exception e) { + // a bind error means no usable value was present; the caller handles the null result + } + return null; + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java index 0d58942f0b..b498e5e67b 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java @@ -24,6 +24,7 @@ import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.NestedConfigurationProperty; +import org.springframework.security.oauth2.core.ClientAuthenticationMethod; /** * Configuration properties used to setup the ComposedTaskRunner. @@ -34,6 +35,8 @@ @ConfigurationProperties public class ComposedTaskProperties { + public static final int MAX_START_WAIT_TIME_DEFAULT = 0; + public static final int MAX_WAIT_TIME_DEFAULT = 0; public static final int INTERVAL_TIME_BETWEEN_CHECKS_DEFAULT = 10000; @@ -46,6 +49,12 @@ public class ComposedTaskProperties { public static final int SPLIT_THREAD_QUEUE_CAPACITY_DEFAULT = Integer.MAX_VALUE; + /** + * The maximum amount of time in millis that the ComposedTaskRunner will wait for the + * start_time of a step's taskExecution to be set before the execution of the Composed task is failed. + */ + private int maxStartWaitTime = MAX_START_WAIT_TIME_DEFAULT; + /** * The maximum amount of time in millis that an individual step can run before * the execution of the Composed task is failed. @@ -99,6 +108,12 @@ public class ComposedTaskProperties { */ private String oauth2ClientCredentialsClientSecret; + /** + * The OAuth2 Client Authentication Method (Used for client credentials grant to + * specify how {@link #oauth2ClientCredentialsClientId} and {@link #oauth2ClientCredentialsClientSecret} are + * going to be sent). + */ + private ClientAuthenticationMethod oauth2ClientCredentialsClientAuthenticationMethod; /** * Token URI for the OAuth2 provider (Used for the client credentials grant). */
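(Editor's aside, not part of the patch: a minimal sketch of how the wait budgets introduced above interact; the setters exist in this diff and the values mirror the tests later in this patch. TimeoutSketch is an illustrative name.)

    import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties;

    class TimeoutSketch {
        public static void main(String[] args) {
            ComposedTaskProperties props = new ComposedTaskProperties();
            props.setMaxStartWaitTime(500);            // fail fast if the step's task never records a start time; 0 (default) disables
            props.setMaxWaitTime(1010);                // overall budget for the step to complete; 0 disables
            props.setIntervalTimeBetweenChecks(1000);  // millis between polls of the task execution
        }
    }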
@@ -214,6 +229,14 @@ public ComposedTaskProperties() { } } + public int getMaxStartWaitTime() { + return this.maxStartWaitTime; + } + + public void setMaxStartWaitTime(int maxStartWaitTime) { + this.maxStartWaitTime = maxStartWaitTime; + } + public int getMaxWaitTime() { return this.maxWaitTime; } @@ -374,6 +397,14 @@ public void setOauth2ClientCredentialsClientSecret(String oauth2ClientCredential this.oauth2ClientCredentialsClientSecret = oauth2ClientCredentialsClientSecret; } + public ClientAuthenticationMethod getOauth2ClientCredentialsClientAuthenticationMethod() { + return oauth2ClientCredentialsClientAuthenticationMethod; + } + + public void setOauth2ClientCredentialsClientAuthenticationMethod(ClientAuthenticationMethod oauth2ClientCredentialsClientAuthenticationMethod) { + this.oauth2ClientCredentialsClientAuthenticationMethod = oauth2ClientCredentialsClientAuthenticationMethod; + } + public String getOauth2ClientCredentialsTokenUri() { return oauth2ClientCredentialsTokenUri; } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/UnexpectedTaskExecutionException.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/UnexpectedTaskExecutionException.java new file mode 100644 index 0000000000..4f0c54339c --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/UnexpectedTaskExecutionException.java @@ -0,0 +1,187 @@ +/* + * Copyright 2017-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.composedtaskrunner.support; + +import java.util.Date; + +import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.boot.ExitCodeGenerator; +import org.springframework.cloud.task.repository.TaskExecution; + +/** + * An exception which extends {@link UnexpectedJobExecutionException}, but + * also carries the exit code of the failed task execution as information. + * + * @author Tobias Soloschenko + */ +public class UnexpectedTaskExecutionException extends UnexpectedJobExecutionException implements ExitCodeGenerator { + + private static final long serialVersionUID = 1080992679855603656L; + + /** + * The unique id associated with the task execution. + */ + private long executionId; + + /** + * The parent task execution id. + */ + private Long parentExecutionId; + + /** + * The recorded exit code for the task. + */ + private Integer exitCode = -1; + + /** + * User defined name for the task. + */ + private String taskName; + + /** + * Time of when the task was started. + */ + private Date startTime; + + /** + * Timestamp of when the task was completed/terminated. + */ + private Date endTime; + + /** + * Message returned from the task or stacktrace. + */ + private String exitMessage; + + /** + * Id assigned to the task by the platform. + */
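(Editor's aside, not part of the patch: because the new exception implements ExitCodeGenerator, a caller can propagate the child task's exit code; a hedged sketch with runReportingExitCode as an illustrative helper name.)

    import org.springframework.batch.core.StepContribution;
    import org.springframework.batch.core.scope.context.ChunkContext;
    import org.springframework.cloud.dataflow.composedtaskrunner.TaskLauncherTasklet;
    import org.springframework.cloud.dataflow.composedtaskrunner.support.UnexpectedTaskExecutionException;

    class ExitCodeSketch {
        static int runReportingExitCode(TaskLauncherTasklet tasklet,
                StepContribution contribution, ChunkContext chunkContext) throws Exception {
            try {
                tasklet.execute(contribution, chunkContext); // may need repeating while CONTINUABLE
                return 0;
            } catch (UnexpectedTaskExecutionException e) {
                return e.getExitCode(); // -1 when the exit code could not be determined
            }
        }
    }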
+ private String externalExecutionId; + + /** + * Error information available upon the failure of a task. + */ + private String errorMessage; + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message. + * + * @param message the detail message + */ + public UnexpectedTaskExecutionException(String message) { + super(message); + } + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message and cause. + * + * @param message the detail message + * @param cause the cause which leads to this exception + */ + public UnexpectedTaskExecutionException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message and taskExecution. + * + * @param message the detail message + * @param taskExecution the taskExecution of the task + */ + public UnexpectedTaskExecutionException(String message, TaskExecution taskExecution) { + this(message); + assignTaskExecutionFields(taskExecution); + } + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message, cause and taskExecution. + * + * @param message the detail message + * @param cause the cause which leads to this exception + * @param taskExecution the taskExecution of the task + */ + public UnexpectedTaskExecutionException(String message, Throwable cause, TaskExecution taskExecution) { + this(message, cause); + assignTaskExecutionFields(taskExecution); + } + + /** + * Assigns the task execution fields to this exception. + * + * @param taskExecution the task execution of which the fields should be assigned to this exception + */ + private void assignTaskExecutionFields(TaskExecution taskExecution) { + if (taskExecution != null) { + executionId = taskExecution.getExecutionId(); + parentExecutionId = taskExecution.getParentExecutionId(); + exitCode = taskExecution.getExitCode(); + taskName = taskExecution.getTaskName(); + startTime = taskExecution.getStartTime(); + endTime = taskExecution.getEndTime(); + externalExecutionId = taskExecution.getExternalExecutionId(); + errorMessage = taskExecution.getErrorMessage(); + exitMessage = taskExecution.getExitMessage(); + } + } + + public long getExecutionId() { + return this.executionId; + } + + /** + * Returns the exit code of the task. + * + * @return the exit code or -1 if the exit code couldn't be determined + */ + @Override + public int getExitCode() { + return this.exitCode; + } + + public String getTaskName() { + return this.taskName; + } + + public Date getStartTime() { + return (this.startTime != null) ? (Date) this.startTime.clone() : null; + } + + public Date getEndTime() { + return (this.endTime != null) ?
(Date) this.endTime.clone() : null; + } + + public String getExitMessage() { + return this.exitMessage; + } + + public String getErrorMessage() { + return this.errorMessage; + } + + public String getExternalExecutionId() { + return this.externalExecutionId; + } + + public Long getParentExecutionId() { + return this.parentExecutionId; + } + +} \ No newline at end of file diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties index c29bf9ec2e..4c8de30b72 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties @@ -1 +1 @@ -configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties +configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties, org.springframework.cloud.task.configuration.TaskProperties diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties index c29bf9ec2e..4c8de30b72 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties @@ -1 +1 @@ -configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties +configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties, org.springframework.cloud.task.configuration.TaskProperties diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json index 1993480c13..56390bb197 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json @@ -17,6 +17,12 @@ "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", "sourceMethod": "getComposedTaskAppProperties()" } + , + { + "name": "task-app-properties", + "type": "org.springframework.cloud.task.configuration.TaskProperties", + "sourceType": "org.springframework.cloud.task.configuration.TaskProperties" + } ], "properties": [ { @@ -76,6 +82,13 @@ "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", "defaultValue": 10000 }, + { + "name": "max-start-wait-time", + "type": "java.lang.Integer", + "description": "Determines the maximum time each child task is allowed for application startup. 
The default of `0` indicates no timeout.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "defaultValue": 0 + }, { "name": "max-wait-time", "type": "java.lang.Integer", diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties new file mode 100644 index 0000000000..4a28840efe --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties @@ -0,0 +1 @@ +spring.cloud.task.closecontext-enabled=true diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java index 42b5f77cc1..84efa92f0c 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java @@ -17,8 +17,6 @@ package org.springframework.cloud.dataflow.composedtaskrunner; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; - import org.springframework.batch.core.Job; import org.springframework.batch.core.JobParameters; @@ -29,17 +27,15 @@ import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.util.Assert; /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class, StepBeanDefinitionRegistrar.class}) @EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java index cbb31da5e3..059e3e353e 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2022 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,13 +16,12 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import java.util.ArrayList; -import java.util.HashMap; - +import java.util.Arrays; +import java.util.Collections; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; @@ -31,15 +30,16 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; +import org.springframework.boot.test.system.CapturedOutput; +import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.rest.client.TaskOperations; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.Assert; @@ -50,13 +50,13 @@ /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class, StepBeanDefinitionRegistrar.class}) -@TestPropertySource(properties = {"graph=AAA && BBB && CCC","max-wait-time=1000", "spring.cloud.task.name=foo"}) -@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) +@TestPropertySource(properties = {"graph=AAA && BBB && CCC", "max-wait-time=1000", "spring.cloud.task.name=foo"}) +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) +@ExtendWith(OutputCaptureExtension.class) public class ComposedTaskRunnerConfigurationNoPropertiesTests { @Autowired @@ -73,7 +73,7 @@ public class ComposedTaskRunnerConfigurationNoPropertiesTests { @Test @DirtiesContext - public void testComposedConfiguration() throws Exception { + public void testComposedConfiguration(CapturedOutput outputCapture) throws Exception { JobExecution jobExecution = this.jobRepository.createJobExecution( "ComposedTest", new JobParameters()); TaskletStep ctrStep = context.getBean("AAA_0", TaskletStep.class); @@ -83,6 +83,18 @@ public void testComposedConfiguration() throws Exception { assertThat(composedTaskProperties.getTransactionIsolationLevel()).isEqualTo("ISOLATION_REPEATABLE_READ"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); - verify(taskOperations).launch("AAA", new HashMap<>(0), new ArrayList<>(0)); + + 
verify(taskOperations).launch( + "AAA", + Collections.emptyMap(), + Arrays.asList("--spring.cloud.task.parent-execution-id=1", "--spring.cloud.task.parent-schema-target=boot2") + ); + + String logEntries = outputCapture.toString(); + assertThat(logEntries).contains("Cannot find [app.AAA.spring.cloud.task.table-prefix, " + + "app.AAA.spring.cloud.task.table_prefix, app.AAA.spring.cloud.task.tablePrefix, " + + "app.AAA.spring.cloud.task.tableprefix, app.AAA.spring.cloud.task.TABLE-PREFIX, " + + "app.AAA.spring.cloud.task.TABLE_PREFIX, app.AAA.spring.cloud.task.TABLEPREFIX]"); + assertThat(logEntries).doesNotContain("taskExplorerContainer:adding:"); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java index 337a36d652..e0e6dde04c 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java @@ -20,7 +20,6 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -35,17 +34,15 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import static org.assertj.core.api.Assertions.assertThat; /** * @author Janne Valkealahti */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) @TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java index 6a5b75c50b..b0a35360b1 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java @@ -21,7 +21,6 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; 
import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -36,9 +35,8 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.util.Assert; import static org.assertj.core.api.Assertions.assertThat; @@ -46,9 +44,8 @@ /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) @TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", "skip-tls-certificate-verification=true", diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java index 6fb20cc334..17685bae16 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,8 +22,8 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; @@ -32,15 +32,17 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; +import org.springframework.boot.test.system.CapturedOutput; +import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.Assert; @@ -51,17 +53,17 @@ /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class, StepBeanDefinitionRegistrar.class}) @TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", "composed-task-properties=" + ComposedTaskRunnerConfigurationWithPropertiesTests.COMPOSED_TASK_PROPS , "interval-time-between-checks=1100", "composed-task-arguments=--baz=boo --AAA.foo=bar BBB.que=qui", - "transaction-isolation-level=ISOLATION_READ_COMMITTED", - "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) + "transaction-isolation-level=ISOLATION_READ_COMMITTED","spring.cloud.task.closecontext-enabled=true", + "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest","max-start-wait-time=1011"}) @EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) +@ExtendWith(OutputCaptureExtension.class) public class ComposedTaskRunnerConfigurationWithPropertiesTests { @Autowired @@ -73,16 +75,22 @@ public class ComposedTaskRunnerConfigurationWithPropertiesTests { @Autowired private ComposedTaskProperties composedTaskProperties; + @Autowired + private TaskProperties taskProperties; + @Autowired ApplicationContext context; protected static final String COMPOSED_TASK_PROPS = "app.ComposedTest-AAA.format=yyyy, " - + "app.ComposedTest-BBB.format=mm, " - + "deployer.ComposedTest-AAA.memory=2048m"; + + "app.ComposedTest-AAA.spring.cloud.task.table-prefix=BOOT3_," + + "app.ComposedTest-BBB.spring.cloud.task.tableprefix=BOOT3_," + + "app.ComposedTest-CCC.spring.cloud.task.tablePrefix=BOOT3_," + + "app.ComposedTest-BBB.format=mm, 
" + + "deployer.ComposedTest-AAA.memory=2048m"; @Test @DirtiesContext - public void testComposedConfiguration() throws Exception { + public void testComposedConfiguration(CapturedOutput outputCapture) throws Exception { assertThat(composedTaskProperties.isSkipTlsCertificateVerification()).isFalse(); JobExecution jobExecution = this.jobRepository.createJobExecution( @@ -96,15 +104,29 @@ public void testComposedConfiguration() throws Exception { Map props = new HashMap<>(1); props.put("format", "yyyy"); props.put("memory", "2048m"); + props.put("spring.cloud.task.table-prefix", "BOOT3_"); + assertThat(composedTaskProperties.getComposedTaskProperties()).isEqualTo(COMPOSED_TASK_PROPS); assertThat(composedTaskProperties.getMaxWaitTime()).isEqualTo(1010); + assertThat(composedTaskProperties.getMaxStartWaitTime()).isEqualTo(1011); assertThat(composedTaskProperties.getIntervalTimeBetweenChecks()).isEqualTo(1100); assertThat(composedTaskProperties.getDataflowServerUri().toASCIIString()).isEqualTo("https://bar"); assertThat(composedTaskProperties.getTransactionIsolationLevel()).isEqualTo("ISOLATION_READ_COMMITTED"); + assertThat(taskProperties.getClosecontextEnabled()).isTrue(); - List args = new ArrayList<>(1); + List args = new ArrayList<>(2); args.add("--baz=boo --foo=bar"); + args.add("--spring.cloud.task.parent-execution-id=1"); + args.add("--spring.cloud.task.parent-schema-target=boot2"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); + verify(taskOperations).launch("ComposedTest-AAA", props, args); + + String logEntries = outputCapture.toString(); + assertThat(logEntries).contains("taskExplorerContainer:adding:ComposedTest-AAA:BOOT3_"); + + assertThat(logEntries).contains("taskExplorerContainer:adding:ComposedTest-BBB:BOOT3_"); + assertThat(logEntries).contains("taskExplorerContainer:adding:ComposedTest-CCC:BOOT3_"); + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java index c65a77ab9d..9f98bdfc4f 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java @@ -22,7 +22,8 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -37,9 +38,8 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.util.Assert; import static org.assertj.core.api.Assertions.assertThat; 
@@ -47,18 +47,19 @@ /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) -@TestPropertySource(properties = {"graph=ComposedTest-l1 && ComposedTest-l2 && ComposedTest-l3","max-wait-time=1010", - "composed-task-app-properties.app.l1.AAA.format=yyyy", - "interval-time-between-checks=1100", - "composed-task-arguments=--baz=boo", - "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) +@TestPropertySource(properties = {"graph=ComposedTest-l1 && ComposedTest-l2 && ComposedTest-l11","max-wait-time=1010", + "composed-task-app-properties.app.l1.AAA.format=yyyy", + "composed-task-app-properties.app.l11.AAA.format=yyyy", + "composed-task-app-properties.app.l2.AAA.format=yyyy", + "interval-time-between-checks=1100", + "composed-task-arguments=--baz=boo", + "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) @EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) public class ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests { - + private static final Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.class); @Autowired private JobRepository jobRepository; @@ -75,13 +76,17 @@ public class ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests { @DirtiesContext public void testComposedConfiguration() throws Exception { JobExecution jobExecution = this.jobRepository.createJobExecution( - "ComposedTest", new JobParameters()); + "ComposedTest", new JobParameters()); job.execute(jobExecution); Map props = new HashMap<>(1); props.put("app.l1.AAA.format", "yyyy"); - Map composedTaskAppProperties = new HashMap<>(1); + props.put("app.l2.AAA.format", "yyyy"); + props.put("app.l11.AAA.format", "yyyy"); + Map composedTaskAppProperties = new HashMap<>(); composedTaskAppProperties.put("app.l1.AAA.format", "yyyy"); + composedTaskAppProperties.put("app.l2.AAA.format", "yyyy"); + composedTaskAppProperties.put("app.l11.AAA.format", "yyyy"); assertThat(composedTaskProperties.getComposedTaskAppProperties()).isEqualTo(composedTaskAppProperties); assertThat(composedTaskProperties.getMaxWaitTime()).isEqualTo(1010); @@ -95,6 +100,8 @@ public void testComposedConfiguration() throws Exception { assertThat(result).contains("--baz=boo"); assertThat(result.size()).isEqualTo(1); Map taskletProperties = ComposedTaskRunnerTaskletTestUtils.getTaskletPropertiesViaReflection(tasklet); + logger.info("taskletProperties:{}", taskletProperties); + assertThat(taskletProperties.keySet()).containsExactly("app.l1.AAA.format"); assertThat(taskletProperties.size()).isEqualTo(1); assertThat(taskletProperties.get("app.l1.AAA.format")).isEqualTo("yyyy"); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java index 213de991f2..cbc9450956 100644 --- 
a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java @@ -22,7 +22,6 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -37,18 +36,16 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import static org.assertj.core.api.Assertions.assertThat; /** * @author Janne Valkealahti */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) @TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", "composed-task-properties=" + ComposedTaskRunnerConfigurationWithVersionPropertiesTests.COMPOSED_TASK_PROPS , diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java index 6ba343310b..9223a611c9 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java @@ -16,10 +16,11 @@ package org.springframework.cloud.dataflow.composedtaskrunner; +import java.util.Collections; + import javax.sql.DataSource; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Step; import org.springframework.batch.core.StepExecutionListener; @@ -35,8 +36,7 @@ import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; @@ -44,17 +44,18 @@ /** * @author Glenn Renfro + * @author Corneil du Plessis */ -@ExtendWith(SpringExtension.class) 
-@ContextConfiguration(classes={org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskRunnerStepFactoryTests.StepFactoryConfiguration.class}) +@SpringJUnitConfig(classes = {org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskRunnerStepFactoryTests.StepFactoryConfiguration.class}) public class ComposedTaskRunnerStepFactoryTests { @Autowired ComposedTaskRunnerStepFactory stepFactory; @Test - public void testStep() throws Exception{ + public void testStep() throws Exception { Step step = stepFactory.getObject(); + assertThat(step).isNotNull(); assertThat(step.getName()).isEqualTo("FOOBAR"); assertThat(step.getStartLimit()).isEqualTo(Integer.MAX_VALUE); } @@ -68,6 +69,12 @@ public static class StepFactoryConfiguration { @MockBean public TaskOperations taskOperations; + @Bean + public TaskExplorerContainer taskExplorerContainer() { + TaskExplorer taskExplorer = mock(TaskExplorer.class); + return new TaskExplorerContainer(Collections.emptyMap(), taskExplorer); + } + @Bean public ComposedTaskProperties composedTaskProperties() { return new ComposedTaskProperties(); @@ -79,7 +86,7 @@ public TaskProperties taskProperties() { } @Bean - public StepBuilderFactory steps(){ + public StepBuilderFactory steps() { return new StepBuilderFactory(mock(JobRepository.class), mock(PlatformTransactionManager.class)); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java index e80a0074e3..08f31756a0 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.composedtaskrunner; +import java.util.Collections; import java.util.Date; import org.junit.jupiter.api.BeforeEach; @@ -24,6 +25,7 @@ import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.test.util.ReflectionTestUtils; @@ -38,6 +40,7 @@ */ public class ComposedTaskStepExecutionListenerTests { + private TaskExplorerContainer taskExplorerContainer; private TaskExplorer taskExplorer; private StepExecution stepExecution; @@ -47,17 +50,16 @@ public class ComposedTaskStepExecutionListenerTests { @BeforeEach public void setup() { this.taskExplorer = mock(TaskExplorer.class); + this.taskExplorerContainer = new TaskExplorerContainer(Collections.emptyMap(), taskExplorer); this.stepExecution = getStepExecution(); - this.taskListener = - new ComposedTaskStepExecutionListener(this.taskExplorer); - ReflectionTestUtils.setField(this.taskListener, "taskExplorer", this.taskExplorer); + this.taskListener = new ComposedTaskStepExecutionListener(this.taskExplorerContainer); } @Test public void testSuccessfulRun() { TaskExecution taskExecution = getDefaultTaskExecution(0, null); 
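// (Editor's note, not part of the patch:) getDefaultTaskExecution(0, null) above
// fabricates an already-completed execution with exit code 0; the stub below hands
// it back from the mocked TaskExplorer, so afterStep(...) can be asserted against
// ExitStatus.COMPLETED without launching a real task.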
when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(),111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(ExitStatus.COMPLETED); } @@ -67,7 +69,7 @@ public void testExitMessageRunSuccess() { TaskExecution taskExecution = getDefaultTaskExecution(0, expectedTaskStatus.getExitCode()); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(expectedTaskStatus); } @@ -78,7 +80,7 @@ public void testExitMessageRunFail() { TaskExecution taskExecution = getDefaultTaskExecution(1, expectedTaskStatus.getExitCode()); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(expectedTaskStatus); } @@ -87,7 +89,7 @@ public void testExitMessageRunFail() { public void testFailedRun() { TaskExecution taskExecution = getDefaultTaskExecution(1, null); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(ExitStatus.FAILED); } @@ -108,14 +110,16 @@ private StepExecution getStepExecution() { return new StepExecution(STEP_NAME, jobExecution); } - private void populateExecutionContext(Long taskExecutionId) { - this.stepExecution.getExecutionContext().put("task-execution-id", - taskExecutionId); + private void populateExecutionContext(String taskName, Long taskExecutionId, String schemaTarget) { + this.stepExecution.getExecutionContext().put("task-name", taskName); + this.stepExecution.getExecutionContext().put("task-execution-id", taskExecutionId); + this.stepExecution.getExecutionContext().put("schema-target", schemaTarget); } private TaskExecution getDefaultTaskExecution (int exitCode, String exitMessage) { TaskExecution taskExecution = new TaskExecution(); + taskExecution.setTaskName("test-ctr"); taskExecution.setExitMessage(exitMessage); taskExecution.setExitCode(exitCode); taskExecution.setEndTime(new Date()); diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java index 79a90b179a..024175b0d5 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,26 +17,31 @@ package org.springframework.cloud.dataflow.composedtaskrunner; import java.util.ArrayList; +import java.util.Collections; import java.util.Date; import java.util.List; - import javax.sql.DataSource; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.assertj.core.api.Assertions; import org.assertj.core.api.AssertionsForClassTypes; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.item.ExecutionContext; import org.springframework.batch.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; @@ -44,9 +49,15 @@ import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; import org.springframework.cloud.dataflow.composedtaskrunner.support.TaskExecutionTimeoutException; +import org.springframework.cloud.dataflow.composedtaskrunner.support.UnexpectedTaskExecutionException; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; import org.springframework.cloud.dataflow.rest.client.DataFlowClientException; import org.springframework.cloud.dataflow.rest.client.DataFlowOperations; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; @@ -59,18 +70,20 @@ import org.springframework.cloud.task.repository.support.TaskRepositoryInitializer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; import org.springframework.hateoas.Link; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.hateoas.mediatype.vnderrors.VndErrors; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; import org.springframework.test.annotation.DirtiesContext; -import 
org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.web.client.ResourceAccessException; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; @@ -79,11 +92,10 @@ /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, org.springframework.cloud.dataflow.composedtaskrunner.TaskLauncherTaskletTests.TestConfiguration.class}) public class TaskLauncherTaskletTests { - + private final static Logger logger = LoggerFactory.getLogger(TaskLauncherTaskletTests.class); private static final String TASK_NAME = "testTask1_0"; @Autowired @@ -98,21 +110,31 @@ public class TaskLauncherTaskletTests { @Autowired private JdbcTaskExecutionDao taskExecutionDao; + @Autowired + private Environment environment; private TaskOperations taskOperations; private TaskRepository taskRepository; private TaskExplorer taskExplorer; + private ObjectMapper mapper; + @BeforeEach public void setup() throws Exception{ + if (this.mapper == null) { + this.mapper = new ObjectMapper(); + this.mapper.registerModule(new Jdk8Module()); + this.mapper.registerModule(new Jackson2HalModule()); + this.mapper.registerModule(new JavaTimeModule()); + this.mapper.registerModule(new Jackson2DataflowModule()); + } this.taskRepositoryInitializer.setDataSource(this.dataSource); - this.taskRepositoryInitializer.afterPropertiesSet(); this.taskOperations = mock(TaskOperations.class); TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean = - new TaskExecutionDaoFactoryBean(this.dataSource); + new MultiSchemaTaskExecutionDaoFactoryBean(this.dataSource, "TASK_"); this.taskRepository = new SimpleTaskRepository(taskExecutionDaoFactoryBean); this.taskExplorer = new SimpleTaskExplorer(taskExecutionDaoFactoryBean); this.composedTaskProperties.setIntervalTimeBetweenChecks(500); @@ -120,7 +142,7 @@ public void setup() throws Exception{ @Test @DirtiesContext - public void testTaskLauncherTasklet() throws Exception{ + public void testTaskLauncherTasklet() { createCompleteTaskExecution(0); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); @@ -130,6 +152,9 @@ public void testTaskLauncherTasklet() throws Exception{ assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); mockReturnValForTaskExecution(2L); chunkContext = chunkContext(); @@ -139,17 +164,28 @@ public void testTaskLauncherTasklet() throws Exception{ assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(2L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); } @Test @DirtiesContext - public void testInvalidTaskOperations() throws Exception{ - TaskLauncherTasklet 
taskLauncherTasklet = new TestTaskLauncherTasklet(null, null, - this.taskExplorer, this.composedTaskProperties, - TASK_NAME, new TaskProperties()); - Exception exception = assertThrows(ComposedTaskException.class, () -> { - execute(taskLauncherTasklet, null, chunkContext()); - }); + public void testInvalidTaskOperations() { + TaskLauncherTasklet taskLauncherTasklet = new TestTaskLauncherTasklet( + null, + null, + this.taskExplorer, + this.composedTaskProperties, + TASK_NAME, + new TaskProperties(), + environment, + mapper + ); + Exception exception = assertThrows( + ComposedTaskException.class, + () -> execute(taskLauncherTasklet, null, chunkContext()) + ); AssertionsForClassTypes.assertThat(exception.getMessage()).isEqualTo( "Unable to connect to Data Flow Server to execute task operations. " + "Verify that Data Flow Server's tasks/definitions endpoint can be accessed."); @@ -157,65 +193,91 @@ public void testInvalidTaskOperations() throws Exception{ @Test @DirtiesContext - public void testTaskLauncherTaskletWithTaskExecutionId() throws Exception{ - TaskLauncherTasklet taskLauncherTasklet = prepTaskLauncherTests(); - + public void testTaskLauncherTaskletWithTaskExecutionId() { TaskProperties taskProperties = new TaskProperties(); taskProperties.setExecutionid(88L); mockReturnValForTaskExecution(2L); ChunkContext chunkContext = chunkContext(); createCompleteTaskExecution(0); - taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); taskLauncherTasklet.setArguments(null); execute(taskLauncherTasklet, null, chunkContext); assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(2L); - assertThat(((List) chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + assertThat(((List) chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); } @Test @DirtiesContext - public void testTaskLauncherTaskletWithTaskExecutionIdWithPreviousParentID() throws Exception{ + public void testTaskLauncherTaskletWithoutTaskExecutionId() { + + mockReturnValForTaskExecution(2L); + ChunkContext chunkContext = chunkContext(); + JobExecution jobExecution = new JobExecution(0L, new JobParameters()); + + createAndStartCompleteTaskExecution(0, jobExecution); + + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); + taskLauncherTasklet.setArguments(null); + StepExecution stepExecution = new StepExecution("stepName", jobExecution, 0L); + StepContribution contribution = new StepContribution(stepExecution); + execute(taskLauncherTasklet, contribution, chunkContext); + ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext(); + logger.info("execution-context:{}", executionContext.entrySet()); + assertThat(executionContext.get("task-execution-id")).isEqualTo(2L); + assertThat(executionContext.get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + assertThat(executionContext.get("task-arguments")).as("task-arguments not null").isNotNull(); + assertThat(((List) executionContext.get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=1"); + } + + @SuppressWarnings("unchecked") + @Test + @DirtiesContext + public void 
testTaskLauncherTaskletWithTaskExecutionIdWithPreviousParentID() { - TaskLauncherTasklet taskLauncherTasklet = prepTaskLauncherTests(); TaskProperties taskProperties = new TaskProperties(); taskProperties.setExecutionid(88L); mockReturnValForTaskExecution(2L); ChunkContext chunkContext = chunkContext(); createCompleteTaskExecution(0); - chunkContext.getStepContext() - .getStepExecution().getExecutionContext().put("task-arguments", new ArrayList()); - ((List)chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-arguments")).add("--spring.cloud.task.parent-execution-id=84"); - taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); + ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext(); + executionContext.put("task-arguments", new ArrayList()); + List taskArguments = (List) executionContext.get("task-arguments"); + assertThat(taskArguments).as("taskArguments").isNotNull(); + taskArguments.add("--spring.cloud.task.parent-execution-id=84"); + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); taskLauncherTasklet.setArguments(null); execute(taskLauncherTasklet, null, chunkContext); - assertThat(chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-execution-id")).isEqualTo(2L); - assertThat(((List) chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); + executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext(); + taskArguments = (List) executionContext.get("task-arguments"); + assertThat(executionContext.get("task-execution-id")).isEqualTo(2L); + assertThat(executionContext.get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + assertThat(taskArguments.get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); } - private TaskLauncherTasklet prepTaskLauncherTests() throws Exception{ - createCompleteTaskExecution(0); - TaskLauncherTasklet taskLauncherTasklet = - getTaskExecutionTasklet(); - ChunkContext chunkContext = chunkContext(); + @Test + @DirtiesContext + public void testTaskLauncherTaskletStartTimeout() { mockReturnValForTaskExecution(1L); - execute(taskLauncherTasklet, null, chunkContext); - assertThat(chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-execution-id")).isEqualTo(1L); - assertThat(chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-arguments")).isNull(); - return taskLauncherTasklet; + this.composedTaskProperties.setMaxStartWaitTime(500); + this.composedTaskProperties.setIntervalTimeBetweenChecks(1000); + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); + ChunkContext chunkContext = chunkContext(); + Throwable exception = assertThrows(TaskExecutionTimeoutException.class, () -> execute(taskLauncherTasklet, null, chunkContext)); + Assertions.assertThat(exception.getMessage()).isEqualTo("Timeout occurred during " + + "startup of task with Execution Id 1"); + + createCompleteTaskExecution(0); + this.composedTaskProperties.setMaxStartWaitTime(500); + this.composedTaskProperties.setIntervalTimeBetweenChecks(1000); + TaskLauncherTasklet taskLauncherTaskletNoTimeout = getTaskExecutionTasklet(); + assertDoesNotThrow(() -> execute(taskLauncherTaskletNoTimeout, null, chunkContext)); } @Test @@ -236,7 +298,7 @@ public void testTaskLauncherTaskletTimeout() {
public void testInvalidTaskName() { final String ERROR_MESSAGE = "Could not find task definition named " + TASK_NAME; - VndErrors errors = new VndErrors("message", ERROR_MESSAGE, new Link("ref")); + VndErrors errors = new VndErrors("message", ERROR_MESSAGE, Link.of("ref")); Mockito.doThrow(new DataFlowClientException(errors)) .when(this.taskOperations) .launch(ArgumentMatchers.anyString(), @@ -245,7 +307,8 @@ public void testInvalidTaskName() { TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); Throwable exception = assertThrows(DataFlowClientException.class, - () -> taskLauncherTasklet.execute(null, chunkContext)); + () -> taskLauncherTasklet.execute(null, chunkContext) + ); Assertions.assertThat(exception.getMessage()).isEqualTo(ERROR_MESSAGE); } @@ -271,14 +334,18 @@ public void testTaskLauncherTaskletFailure() { mockReturnValForTaskExecution(1L); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); - createCompleteTaskExecution(1); - Throwable exception = assertThrows(UnexpectedJobExecutionException.class, + createCompleteTaskExecution(1, "This is the exit message of the task itself."); + UnexpectedTaskExecutionException exception = assertThrows(UnexpectedTaskExecutionException.class, () -> execute(taskLauncherTasklet, null, chunkContext)); Assertions.assertThat(exception.getMessage()).isEqualTo("Task returned a non zero exit code."); + Assertions.assertThat(exception.getExitCode()).isEqualTo(1); + Assertions.assertThat(exception.getExitMessage()).isEqualTo("This is the exit message of the task itself."); + Assertions.assertThat(exception.getEndTime()).isNotNull(); } private RepeatStatus execute(TaskLauncherTasklet taskLauncherTasklet, StepContribution contribution, - ChunkContext chunkContext) throws Exception{ + ChunkContext chunkContext) { RepeatStatus status = taskLauncherTasklet.execute(contribution, chunkContext); if (!status.isContinuable()) { throw new IllegalStateException("Expected continuable status for the first execution."); @@ -289,13 +356,12 @@ private RepeatStatus execute(TaskLauncherTasklet taskLauncherTasklet, StepContri @Test @DirtiesContext - public void testTaskLauncherTaskletNullResult() throws Exception { - boolean isException = false; + public void testTaskLauncherTaskletNullResult() { mockReturnValForTaskExecution(1L); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); getCompleteTaskExecutionWithNull(); - Throwable exception = assertThrows(UnexpectedJobExecutionException.class, + Throwable exception = assertThrows(UnexpectedTaskExecutionException.class, () -> execute(taskLauncherTasklet, null, chunkContext)); Assertions.assertThat(exception.getMessage()).isEqualTo("Task returned a null exit code."); } @@ -308,7 +374,7 @@ public void testTaskOperationsConfiguredWithMissingPassword() { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, composedTaskProperties, - TASK_NAME, new TaskProperties()); + TASK_NAME, new TaskProperties(), environment, mapper); taskLauncherTasklet.taskOperations(); } catch (IllegalArgumentException e) { @@ -318,6 +384,79 @@ fail("Expected an IllegalArgumentException to be thrown"); } + @Test + @DirtiesContext + public void
testTaskLauncherTaskletIgnoreExitMessage() { + createCompleteTaskExecution(0); + + TaskLauncherTasklet taskLauncherTasklet = + getTaskExecutionTasklet(); + taskLauncherTasklet.setArguments(Collections.singletonList("--ignoreExitMessage=true")); + ChunkContext chunkContext = chunkContext(); + mockReturnValForTaskExecution(1L); + execute(taskLauncherTasklet, null, chunkContext); + Assertions.assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + Assertions.assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); + } + + @Test + @DirtiesContext + public void testTaskLauncherTaskletIgnoreExitMessageViaProperties() { + createCompleteTaskExecution(0); + + TaskLauncherTasklet taskLauncherTasklet = + getTaskExecutionTasklet(); + taskLauncherTasklet.setProperties(Collections.singletonMap("app.foo." + TaskLauncherTasklet.IGNORE_EXIT_MESSAGE_PROPERTY, "true")); + ChunkContext chunkContext = chunkContext(); + mockReturnValForTaskExecution(1L); + execute(taskLauncherTasklet, null, chunkContext); + Assertions.assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + Assertions.assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); + } + + @Test + @DirtiesContext + public void testTaskLauncherTaskletIgnoreExitMessageViaCommandLineOverride() { + createCompleteTaskExecution(0); + + TaskLauncherTasklet taskLauncherTasklet = + getTaskExecutionTasklet(); + taskLauncherTasklet.setArguments(Collections.singletonList("--ignoreExitMessage=false")); + taskLauncherTasklet.setProperties(Collections.singletonMap("app.foo." 
+ TaskLauncherTasklet.IGNORE_EXIT_MESSAGE_PROPERTY, "true")); + ChunkContext chunkContext = chunkContext(); + mockReturnValForTaskExecution(1L); + execute(taskLauncherTasklet, null, chunkContext); + Assertions.assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + Assertions.assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); + Assertions.assertThat((Boolean)chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isFalse(); + } @Test public void testTaskOperationsConfiguredWithMissingUsername() { @@ -327,7 +466,7 @@ public void testTaskOperationsConfiguredWithMissingUsername() { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, composedTaskProperties, - TASK_NAME, new TaskProperties()); + TASK_NAME, new TaskProperties(), environment, mapper); taskLauncherTasklet.taskOperations(); } catch (IllegalArgumentException e) { @@ -336,9 +475,16 @@ } fail("Expected an IllegalArgumentException to be thrown"); } + private void createCompleteTaskExecution(int exitCode, String... message) { + TaskExecution taskExecution = this.taskRepository.createTaskExecution(); + this.taskRepository.completeTaskExecution(taskExecution.getExecutionId(), + exitCode, new Date(), message != null && message.length > 0 ?
message[0] : ""); + } - private void createCompleteTaskExecution(int exitCode) { + private void createAndStartCompleteTaskExecution(int exitCode, JobExecution jobExecution) { TaskExecution taskExecution = this.taskRepository.createTaskExecution(); + JdbcTaskBatchDao taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + taskBatchDao.saveRelationship(taskExecution, jobExecution); this.taskRepository.completeTaskExecution(taskExecution.getExecutionId(), exitCode, new Date(), ""); } @@ -356,7 +502,7 @@ private TaskLauncherTasklet getTaskExecutionTasklet() { private TaskLauncherTasklet getTaskExecutionTasklet(TaskProperties taskProperties) { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, this.composedTaskProperties, - TASK_NAME, taskProperties); + TASK_NAME, taskProperties, environment, mapper); ReflectionTestUtils.setField(taskLauncherTasklet, "taskOperations", this.taskOperations); return taskLauncherTasklet; } @@ -371,9 +517,11 @@ private ChunkContext chunkContext () StepContext stepContext = new StepContext(stepExecution); return new ChunkContext(stepContext); } - private void mockReturnValForTaskExecution(long executionId) { - Mockito.doReturn(executionId) + mockReturnValForTaskExecution(executionId, SchemaVersionTarget.defaultTarget().getName()); + } + private void mockReturnValForTaskExecution(long executionId, String schemaTarget) { + Mockito.doReturn(new LaunchResponseResource(executionId, schemaTarget)) .when(this.taskOperations) .launch(ArgumentMatchers.anyString(), ArgumentMatchers.any(), @@ -403,8 +551,10 @@ public TestTaskLauncherTasklet( OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient, TaskExplorer taskExplorer, ComposedTaskProperties composedTaskProperties, String taskName, - TaskProperties taskProperties) { - super(clientRegistrations, clientCredentialsTokenResponseClient,taskExplorer,composedTaskProperties,taskName,taskProperties); + TaskProperties taskProperties, + Environment environment, + ObjectMapper mapper) { + super(clientRegistrations, clientCredentialsTokenResponseClient,taskExplorer,composedTaskProperties,taskName,taskProperties, environment, mapper); } @Override diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java index f12cb96681..af57f49b8b 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java @@ -28,6 +28,7 @@ import org.springframework.cloud.dataflow.core.Base64Utils; import org.springframework.core.env.StandardEnvironment; import org.springframework.core.env.SystemEnvironmentPropertySource; +import org.springframework.security.oauth2.core.ClientAuthenticationMethod; import static org.assertj.core.api.Assertions.assertThat; @@ -46,6 +47,7 @@ public void testGettersAndSetters() throws URISyntaxException{ properties.setComposedTaskArguments("bbb"); properties.setIntervalTimeBetweenChecks(12345); properties.setMaxWaitTime(6789); + properties.setMaxStartWaitTime(101112); properties.setDataflowServerUri(new URI("http://test")); properties.setGraph("ddd"); 
properties.setDataflowServerUsername("foo"); @@ -56,6 +58,7 @@ public void testGettersAndSetters() throws URISyntaxException{ assertThat(properties.getComposedTaskArguments()).isEqualTo("bbb"); assertThat(properties.getIntervalTimeBetweenChecks()).isEqualTo(12345); assertThat(properties.getMaxWaitTime()).isEqualTo(6789); + assertThat(properties.getMaxStartWaitTime()).isEqualTo(101112); assertThat(properties.getDataflowServerUri().toString()).isEqualTo("http://test"); assertThat(properties.getGraph()).isEqualTo("ddd"); assertThat(properties.getDataflowServerUsername()).isEqualTo("foo"); @@ -121,6 +124,18 @@ public void testComposedTaskAppArguments() { }); } + @Test + public void testAssignmentOfOauth2ClientCredentialsClientAuthenticationMethod(){ + this.contextRunner + .withSystemProperties("OAUTH2_CLIENT_CREDENTIALS_CLIENT_AUTHENTICATION_METHOD=POST") + .withUserConfiguration(Config1.class).run((context) -> { + ComposedTaskProperties properties = context.getBean(ComposedTaskProperties.class); + assertThat(properties.getOauth2ClientCredentialsClientAuthenticationMethod()) + .withFailMessage("The OAuth2 client credentials client authentication method couldn't be assigned correctly.") + .isEqualTo(ClientAuthenticationMethod.POST); + }); + } + @EnableConfigurationProperties({ ComposedTaskProperties.class }) private static class Config1 { } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java index 707a96c31f..c6245cd01f 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java @@ -24,7 +24,7 @@ import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import static org.hamcrest.Matchers.equalTo; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Gunnar Hillert @@ -43,19 +43,19 @@ public void teardown() { @Test public void noPropertySet() throws Exception { this.context = load(Config.class); - org.hamcrest.MatcherAssert.assertThat(context.containsBean("myBean"), equalTo(false)); + assertThat(context.containsBean("myBean")).isEqualTo(false); } @Test public void propertyClientId() throws Exception { this.context = load(Config.class, "oauth2-client-credentials-client-id:12345"); - org.hamcrest.MatcherAssert.assertThat(context.containsBean("myBean"), equalTo(true)); + assertThat(context.containsBean("myBean")).isEqualTo(true); } @Test public void clientIdOnlyWithNoValue() throws Exception { this.context = load(Config.class, "oauth2-client-credentials-client-id:"); - org.hamcrest.MatcherAssert.assertThat(context.containsBean("myBean"), equalTo(false)); + assertThat(context.containsBean("myBean")).isEqualTo(false); } private AnnotationConfigApplicationContext load(Class config, String... 
env) { diff --git a/spring-cloud-dataflow-configuration-metadata/pom.xml b/spring-cloud-dataflow-configuration-metadata/pom.xml index f93abc3e72..e239bf8097 100644 --- a/spring-cloud-dataflow-configuration-metadata/pom.xml +++ b/spring-cloud-dataflow-configuration-metadata/pom.xml @@ -4,16 +4,23 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-configuration-metadata + spring-cloud-dataflow-configuration-metadata + Spring Cloud Data Flow Configuration Metadata + UTF-8 + true + 3.4.1 org.springframework.cloud spring-cloud-dataflow-container-registry + ${project.version} org.springframework.boot @@ -59,4 +66,45 @@ test + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java index 812441a0e9..01c182acbd 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -74,12 +74,6 @@ public class BootApplicationConfigurationMetadataResolver extends ApplicationCon private static final String CONFIGURATION_METADATA_PATTERN = "classpath*:/META-INF/spring-configuration-metadata.json"; - // this is superseded by name prefixed with dataflow and will get removed in future - private static final String DEPRECATED_SPRING_CONFIGURATION_PROPERTIES = "classpath*:/META-INF/spring-configuration-metadata-whitelist.properties"; - - // this is superseded by VISIBLE_PROPERTIES - private static final String DEPRECATED_DATAFLOW_CONFIGURATION_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-metadata-whitelist.properties"; - private static final String VISIBLE_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-metadata.properties"; private static final String PORT_MAPPING_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-port-mapping.properties"; @@ -130,25 +124,9 @@ public BootApplicationConfigurationMetadataResolver(ClassLoader parent, private static Resource[] visibleConfigurationMetadataResources(ClassLoader classLoader) throws IOException { ResourcePatternResolver resourcePatternResolver = new PathMatchingResourcePatternResolver(classLoader); Resource[] configurationResources = resourcePatternResolver.getResources(VISIBLE_PROPERTIES); - - Resource[] deprecatedSpringConfigurationResources = resourcePatternResolver - .getResources(DEPRECATED_SPRING_CONFIGURATION_PROPERTIES); - if (deprecatedSpringConfigurationResources.length > 0) { - logger.warn("The use of " + DEPRECATED_SPRING_CONFIGURATION_PROPERTIES + " is a deprecated. Please use " - + VISIBLE_PROPERTIES + " instead."); - } - Resource[] deprecatedDataflowConfigurationResources = resourcePatternResolver - .getResources(DEPRECATED_DATAFLOW_CONFIGURATION_PROPERTIES); - if (deprecatedDataflowConfigurationResources.length > 0) { - logger.warn("The use of " + DEPRECATED_DATAFLOW_CONFIGURATION_PROPERTIES - + " is a deprecated. Please use " + VISIBLE_PROPERTIES + " instead."); - } - Resource[] portMappingResources = resourcePatternResolver.getResources(PORT_MAPPING_PROPERTIES); Resource[] groupingResources = resourcePatternResolver.getResources(OPTION_GROUPS_PROPERTIES); - return concatArrays(configurationResources, deprecatedSpringConfigurationResources, - deprecatedDataflowConfigurationResources, portMappingResources, groupingResources); - + return concatArrays(configurationResources, portMappingResources, groupingResources); } private static Resource[] concatArrays(final Resource[]... 
arrays) { diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java index 0af21b76cb..0ce5522293 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java @@ -20,6 +20,7 @@ import java.util.Map; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryException; +import org.springframework.cloud.dataflow.container.registry.ContainerRegistryProperties; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryRequest; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryService; import org.springframework.util.StringUtils; @@ -30,6 +31,7 @@ * * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class DefaultContainerImageMetadataResolver implements ContainerImageMetadataResolver { @@ -39,6 +41,7 @@ public DefaultContainerImageMetadataResolver(ContainerRegistryService containerR this.containerRegistryService = containerRegistryService; } + @SuppressWarnings("unchecked") @Override public Map getImageLabels(String imageName) { @@ -48,12 +51,23 @@ public Map getImageLabels(String imageName) { ContainerRegistryRequest registryRequest = this.containerRegistryService.getRegistryRequest(imageName); - Map manifest = this.containerRegistryService.getImageManifest(registryRequest, Map.class); - - if (manifest != null && !isNotNullMap(manifest.get("config"))) { - throw new ContainerRegistryException( - String.format("Image [%s] has incorrect or missing manifest config element: %s", - imageName, manifest.toString())); + Map manifest = this.containerRegistryService.getImageManifest(registryRequest, Map.class); + + if (manifest != null && manifest.get("config") == null) { + // When a repository stores both Docker and OCI images, the response for an OCI image requested with the Docker manifest type will not contain a config element. + // If no config is present and schemaVersion is less than 2, retry with the OCI manifest media type.
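+ // The fallback below re-requests the manifest with the OCI manifest media type and treats the
+ // manifest as invalid only if the config element is still missing after that second attempt.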
+ String manifestMediaType = registryRequest.getRegistryConf().getManifestMediaType(); + if (asInt(manifest.get("schemaVersion")) < 2 + && !manifestMediaType.equals(ContainerRegistryProperties.OCI_IMAGE_MANIFEST_MEDIA_TYPE)) { + registryRequest.getRegistryConf() + .setManifestMediaType(ContainerRegistryProperties.OCI_IMAGE_MANIFEST_MEDIA_TYPE); + manifest = this.containerRegistryService.getImageManifest(registryRequest, Map.class); + } + if (manifest == null || manifest.get("config") == null) { + String message = String.format("Image [%s] has incorrect or missing manifest config element: %s", + imageName, manifest); + throw new ContainerRegistryException(message); + } } if (manifest != null) { String configDigest = ((Map) manifest.get("config")).get("digest"); @@ -85,12 +99,24 @@ public Map getImageLabels(String imageName) { (Map) configElement.get("Labels") : Collections.emptyMap(); } else { - throw new ContainerRegistryException(String.format("Image [%s] is missing manifest", imageName)); + throw new ContainerRegistryException(String.format("Image [%s] is missing manifest", imageName)); + } + } + + // A registry may report schemaVersion as a Number or a String. + private static int asInt(Object value) { + if (value instanceof Number) { + return ((Number) value).intValue(); + } + else if (value instanceof String) { + return Integer.parseInt((String) value); + } + else if (value != null) { + return Integer.parseInt(value.toString()); } + return 0; } - private boolean isNotNullMap(Object object) { - return object != null && (object instanceof Map); + private static boolean isNotNullMap(Object object) { + return object instanceof Map; } } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java index 180885ffaf..b624bf698d 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java @@ -23,8 +23,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; @@ -41,14 +40,14 @@ import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -56,8 +55,7 @@ /** *
@author Christian Tzolov */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes = ApplicationConfigurationMetadataResolverAutoConfigurationTest.TestConfig.class) +@SpringJUnitConfig(classes = ApplicationConfigurationMetadataResolverAutoConfigurationTest.TestConfig.class) @TestPropertySource(properties = { ".dockerconfigjson={\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}" + ",\"demo2.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}", @@ -215,10 +213,18 @@ public ContainerImageRestTemplateFactory containerImageRestTemplateFactory( @Qualifier("containerRestTemplate") RestTemplate containerRestTemplate, @Qualifier("containerRestTemplateWithHttpProxy") RestTemplate containerRestTemplateWithHttpProxy) { ContainerImageRestTemplateFactory containerImageRestTemplateFactory = Mockito.mock(ContainerImageRestTemplateFactory.class); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(false))).thenReturn(noSslVerificationContainerRestTemplate); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(true))).thenReturn(noSslVerificationContainerRestTemplateWithHttpProxy); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(false))).thenReturn(containerRestTemplate); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(true))).thenReturn(containerRestTemplateWithHttpProxy); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(false), + anyMap())) + .thenReturn(noSslVerificationContainerRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(true), + anyMap())) + .thenReturn(noSslVerificationContainerRestTemplateWithHttpProxy); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(false), + anyMap())) + .thenReturn(containerRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(true), + anyMap())) + .thenReturn(containerRestTemplateWithHttpProxy); return containerImageRestTemplateFactory; } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java index fece9e5021..8d7648c97a 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -23,9 +23,9 @@ import java.util.Map; import java.util.Set; -import org.hamcrest.Matcher; -import org.junit.Before; -import org.junit.Test; +import org.assertj.core.api.Condition; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -35,12 +35,7 @@ import org.springframework.core.io.ClassPathResource; import org.springframework.util.StreamUtils; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasProperty; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; /** @@ -57,7 +52,7 @@ public class BootApplicationConfigurationMetadataResolverTests { private ApplicationConfigurationMetadataResolver resolver; - @Before + @BeforeEach public void init() { MockitoAnnotations.initMocks(this); resolver = new BootApplicationConfigurationMetadataResolver(containerImageMetadataResolver); @@ -68,7 +63,7 @@ public void appDockerResourceEmptyLabels() { when(containerImageMetadataResolver.getImageLabels("test/test:latest")).thenReturn(new HashMap<>()); List properties = resolver .listProperties(new DockerResource("test/test:latest")); - assertThat(properties.size(), is(0)); + assertThat(properties).isEmpty(); } @Test @@ -82,7 +77,7 @@ public void appDockerResource() throws IOException { new String(bytes))); List properties = resolver .listProperties(new DockerResource("test/test:latest")); - assertThat(properties.size(), is(3)); + assertThat(properties).hasSize(3); } @Test @@ -94,44 +89,24 @@ public void appDockerResourceBrokenFormat() { when(containerImageMetadataResolver.getImageLabels("test/test:latest")).thenReturn(result); List properties = resolver .listProperties(new DockerResource("test/test:latest")); - assertThat(properties.size(), is(0)); + assertThat(properties).isEmpty(); } @Test public void appSpecificVisiblePropsShouldBeVisible() { List properties = resolver .listProperties(new ClassPathResource("apps/filter-processor", getClass())); - assertThat(properties, hasItem(configPropertyIdentifiedAs("filter.expression"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2"))); - } - - @Test - public void appSpecificVisibleLegacyPropsShouldBeVisible() { - List properties = resolver - .listProperties(new ClassPathResource("apps/filter-processor-legacy", getClass())); - assertThat(properties, hasItem(configPropertyIdentifiedAs("filter.expression"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2"))); - } - - @Test - public void appSpecificVisibleLegacyPropsShouldBeVisibleIfBothInPlace() { - // test resource files has both expresso2 and expresso3 in spring-configuration-metadata - // and as we prefer new format(expresso3 included) and it exists - // expresso2 from old format doesn't get read. 
- List properties = resolver - .listProperties(new ClassPathResource("apps/filter-processor-both", getClass())); - assertThat(properties, hasItem(configPropertyIdentifiedAs("filter.expression"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso3"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2"))); + assertThat(properties).haveAtLeastOne(configPropertyIdentifiedAs("filter.expression")); + assertThat(properties).haveAtLeastOne(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2")); } @Test public void otherPropertiesShouldOnlyBeVisibleInExtensiveCall() { List properties = resolver .listProperties(new ClassPathResource("apps/filter-processor", getClass())); - assertThat(properties, not(hasItem(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret")))); + assertThat(properties).doNotHave(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret")); properties = resolver.listProperties(new ClassPathResource("apps/filter-processor", getClass()), true); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret"))); + assertThat(properties).haveAtLeastOne(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret")); } @Test @@ -140,8 +115,8 @@ public void shouldReturnEverythingWhenNoDescriptors() { .listProperties(new ClassPathResource("apps/no-visible-properties", getClass())); List full = resolver .listProperties(new ClassPathResource("apps/no-visible-properties", getClass()), true); - assertThat(properties.size(), is(0)); - assertThat(full.size(), is(3)); + assertThat(properties).isEmpty(); + assertThat(full).hasSize(3); } @Test @@ -150,31 +125,31 @@ public void deprecatedErrorPropertiesShouldNotBeVisible() { .listProperties(new ClassPathResource("apps/deprecated-error", getClass())); List full = resolver .listProperties(new ClassPathResource("apps/deprecated-error", getClass()), true); - assertThat(properties.size(), is(0)); - assertThat(full.size(), is(2)); + assertThat(properties).isEmpty(); + assertThat(full).hasSize(2); } @Test public void shouldReturnPortMappingProperties() { Map> portNames = resolver.listPortNames(new ClassPathResource("apps/filter-processor", getClass())); - assertThat(portNames.size(), is(2)); - assertThat(portNames.get("inbound").size(), is(3)); - assertThat(portNames.get("inbound"), containsInAnyOrder("in1", "in2", "in3")); - assertThat(portNames.get("outbound").size(), is(2)); - assertThat(portNames.get("outbound"), containsInAnyOrder("out1", "out2")); + assertThat(portNames).hasSize(2); + assertThat(portNames.get("inbound")).hasSize(3); + assertThat(portNames.get("inbound")).containsExactlyInAnyOrder("in1", "in2", "in3"); + assertThat(portNames.get("outbound")).hasSize(2); + assertThat(portNames.get("outbound")).containsExactlyInAnyOrder("out1", "out2"); } @Test public void shouldReturnOptionGroupsProperties() { Map> optionGroups = resolver.listOptionGroups(new ClassPathResource("apps/filter-processor", getClass())); - assertThat(optionGroups.size(), is(4)); - assertThat(optionGroups.get("g1").size(), is(3)); - assertThat(optionGroups.get("g1"), containsInAnyOrder("foo1.bar1", "foo1.bar2", "foo1.bar3")); - assertThat(optionGroups.get("g2").size(), is(0)); - assertThat(optionGroups.get("g1.sb1").size(), is(1)); - assertThat(optionGroups.get("g1.sb1"), containsInAnyOrder("foo2.bar1")); - assertThat(optionGroups.get("g1.sb2").size(), is(2)); - 
assertThat(optionGroups.get("g1.sb2"), containsInAnyOrder("foo3.bar1", "foo3.bar2")); + assertThat(optionGroups).hasSize(4); + assertThat(optionGroups.get("g1")).hasSize(3); + assertThat(optionGroups.get("g1")).containsExactlyInAnyOrder("foo1.bar1", "foo1.bar2", "foo1.bar3"); + assertThat(optionGroups.get("g2")).isEmpty(); + assertThat(optionGroups.get("g1.sb1")).hasSize(1); + assertThat(optionGroups.get("g1.sb1")).containsExactly("foo2.bar1"); + assertThat(optionGroups.get("g1.sb2")).hasSize(2); + assertThat(optionGroups.get("g1.sb2")).containsExactlyInAnyOrder("foo3.bar1", "foo3.bar2"); } @Test @@ -184,15 +159,15 @@ public void appDockerResourceWithInboundOutboundPortMapping() { result.put("configuration-properties.outbound-ports", "output1, output2"); when(this.containerImageMetadataResolver.getImageLabels("test/test:latest")).thenReturn(result); Map> portNames = this.resolver.listPortNames(new DockerResource("test/test:latest")); - assertThat(portNames.size(), is(2)); - assertThat(portNames.get("inbound").size(), is(3)); - assertThat(portNames.get("inbound"), containsInAnyOrder("input1", "input2", "input3")); - assertThat(portNames.get("outbound").size(), is(2)); - assertThat(portNames.get("outbound"), containsInAnyOrder("output1", "output2")); + assertThat(portNames).hasSize(2); + assertThat(portNames.get("inbound")).hasSize(3); + assertThat(portNames.get("inbound")).containsExactlyInAnyOrder("input1", "input2", "input3"); + assertThat(portNames.get("outbound")).hasSize(2); + assertThat(portNames.get("outbound")).containsExactlyInAnyOrder("output1", "output2"); } - private Matcher configPropertyIdentifiedAs(String name) { - return hasProperty("id", is(name)); + private Condition configPropertyIdentifiedAs(String name) { + return new Condition<>(item -> item.getId().equals(name), "configPropertyIdentifiedAs"); } } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java index be870752fb..2bdfa8bee8 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java @@ -16,15 +16,15 @@ package org.springframework.cloud.dataflow.container.registry; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatcher; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -34,16 +34,19 @@ import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.util.StringUtils; import org.springframework.web.client.RestTemplate; import org.springframework.web.util.UriComponents; import org.springframework.web.util.UriComponentsBuilder; -import static 
org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -64,11 +67,11 @@ public class DefaultContainerImageMetadataResolverTest { private ContainerRegistryService containerRegistryService; - @Before + @BeforeEach public void init() { MockitoAnnotations.initMocks(this); - when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean())).thenReturn(mockRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean(), anyMap())).thenReturn(mockRestTemplate); // DockerHub registry configuration by default. ContainerRegistryConfiguration dockerHubAuthConfig = new ContainerRegistryConfiguration(); @@ -88,13 +91,15 @@ public void init() { when(registryAuthorizer.getAuthorizationHeaders(any(ContainerImage.class), any())).thenReturn(new HttpHeaders()); this.containerRegistryService = new ContainerRegistryService(containerImageRestTemplateFactory, - new ContainerImageParser(), registryConfigurationMap, Arrays.asList(registryAuthorizer)); + new ContainerImageParser(), registryConfigurationMap, Collections.singletonList(registryAuthorizer)); } - @Test(expected = ContainerRegistryException.class) + @Test public void getImageLabelsInvalidImageName() { - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - resolver.getImageLabels(null); + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + resolver.getImageLabels(null); + }); } @Test @@ -109,8 +114,8 @@ public void getImageLabels() throws JsonProcessingException { "registry-1.docker.io", null, "test/image", "123"); Map labels = resolver.getImageLabels("test/image:latest"); - assertThat(labels.size(), is(1)); - assertThat(labels.get("boza"), is("koza")); + assertThat(labels).hasSize(1); + assertThat(labels.get("boza")).isEqualTo("koza"); } @Test @@ -125,61 +130,69 @@ public void getImageLabelsFromPrivateRepository() throws JsonProcessingException "my-private-repository.com", "5000", "test/image", "123"); Map labels = resolver.getImageLabels("my-private-repository.com:5000/test/image:latest"); - assertThat(labels.size(), is(1)); - assertThat(labels.get("boza"), is("koza")); + assertThat(labels).hasSize(1); + assertThat(labels.get("boza")).isEqualTo("koza"); } - @Test(expected = ContainerRegistryException.class) + @Test public void getImageLabelsMissingRegistryConfiguration() { - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - resolver.getImageLabels("somehost:8083/test/image:latest"); + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + resolver.getImageLabels("somehost:8083/test/image:latest"); + }); } - @Test(expected = 
ContainerRegistryException.class) + @Test public void getImageLabelsMissingRegistryAuthorizer() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( + new ContainerRegistryService(containerImageRestTemplateFactory, + new ContainerImageParser(), registryConfigurationMap, Collections.emptyList())); - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( - new ContainerRegistryService(containerImageRestTemplateFactory, - new ContainerImageParser(), registryConfigurationMap, Collections.emptyList())); - - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) + @Test public void getImageLabelsMissingAuthorizationHeader() { - RegistryAuthorizer registryAuthorizer = mock(RegistryAuthorizer.class); + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + RegistryAuthorizer registryAuthorizer = mock(RegistryAuthorizer.class); - when(registryAuthorizer.getType()).thenReturn(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); - when(registryAuthorizer.getAuthorizationHeaders(any(ContainerImage.class), any())).thenReturn(null); + when(registryAuthorizer.getType()).thenReturn(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); + when(registryAuthorizer.getAuthorizationHeaders(any(ContainerImage.class), any())).thenReturn(null); - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( - new ContainerRegistryService(containerImageRestTemplateFactory, new ContainerImageParser(), registryConfigurationMap, Arrays.asList(registryAuthorizer))); + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( + new ContainerRegistryService(containerImageRestTemplateFactory, new ContainerImageParser(), registryConfigurationMap, Collections.singletonList(registryAuthorizer))); - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) + @Test public void getImageLabelsInvalidManifestResponse() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - - Map manifestResponseWithoutConfig = Collections.emptyMap(); - mockManifestRestTemplateCall(manifestResponseWithoutConfig, "registry-1.docker.io", - null, "test/image", "latest"); + Map manifestResponseWithoutConfig = Collections.emptyMap(); + mockManifestRestTemplateCall(manifestResponseWithoutConfig, "registry-1.docker.io", + null, "test/image", "latest"); - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) + @Test public void getImageLabelsInvalidDigest() { - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new 
MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - String emptyDigest = ""; - Map manifestResponse = Collections.singletonMap("config", Collections.singletonMap("digest", emptyDigest)); - mockManifestRestTemplateCall(manifestResponse, "registry-1.docker.io", null, - "test/image", "latest"); + String emptyDigest = ""; + Map manifestResponse = Collections.singletonMap("config", Collections.singletonMap("digest", emptyDigest)); + mockManifestRestTemplateCall(manifestResponse, "registry-1.docker.io", null, + "test/image", "latest"); - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } @Test @@ -195,7 +208,26 @@ public void getImageLabelsWithInvalidLabels() throws JsonProcessingException { "registry-1.docker.io", null, "test/image", "123"); Map labels = resolver.getImageLabels("test/image:latest"); - assertThat(labels.size(), is(0)); + assertThat(labels).isEmpty(); + } + + @Test + public void getImageLabelsWithMixedOCIResponses() throws JsonProcessingException { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( + this.containerRegistryService); + String ociInCompatible = "{\"schemaVersion\": 1,\"name\": \"test/image\"}"; + String ociCompatible = "{\"schemaVersion\": 2,\"mediaType\": \"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\": \"application/vnd.oci.image.config.v1+json\",\"digest\": \"sha256:efc06d6096cc88697e477abb0b3479557e1bec688c36813383f1a8581f87d9f8\",\"size\": 34268}}"; + mockManifestRestTemplateCallAccepts(ociInCompatible, "my-private-repository.com", "5000", "test/image", + "latest", ContainerRegistryProperties.DOCKER_IMAGE_MANIFEST_MEDIA_TYPE); + mockManifestRestTemplateCallAccepts(ociCompatible, "my-private-repository.com", "5000", "test/image", "latest", + ContainerRegistryProperties.OCI_IMAGE_MANIFEST_MEDIA_TYPE); + String blobResponse = "{\"config\": {\"Labels\": {\"boza\": \"koza\"}}}"; + mockBlogRestTemplateCall(blobResponse, "my-private-repository.com", "5000", "test/image", + "sha256:efc06d6096cc88697e477abb0b3479557e1bec688c36813383f1a8581f87d9f8"); + + Map labels = resolver.getImageLabels("my-private-repository.com:5000/test/image:latest"); + assertThat(labels).isNotEmpty(); + assertThat(labels).containsEntry("boza", "koza"); } private void mockManifestRestTemplateCall(Map mapToReturn, String registryHost, @@ -234,6 +266,39 @@ private void mockBlogRestTemplateCall(String jsonResponse, String registryHost, .thenReturn(new ResponseEntity<>(new ObjectMapper().readValue(jsonResponse, Map.class), HttpStatus.OK)); } + private void mockManifestRestTemplateCallAccepts(String jsonResponse, String registryHost, String registryPort, + String repository, String tagOrDigest, String accepts) throws JsonProcessingException { + + UriComponents blobUriComponents = UriComponentsBuilder.newInstance() + .scheme("https") + .host(registryHost) + .port(StringUtils.hasText(registryPort) ? 
registryPort : null) + .path("v2/{repository}/manifests/{reference}") + .build() + .expand(repository, tagOrDigest); + + MediaType mediaType = new MediaType(org.apache.commons.lang3.StringUtils.substringBefore(accepts, "/"), + org.apache.commons.lang3.StringUtils.substringAfter(accepts, "/")); + when(mockRestTemplate.exchange(eq(blobUriComponents.toUri()), eq(HttpMethod.GET), + argThat(new HeaderAccepts(mediaType)), eq(Map.class))) + .thenReturn(new ResponseEntity<>(new ObjectMapper().readValue(jsonResponse, Map.class), HttpStatus.OK)); + } + + static class HeaderAccepts implements ArgumentMatcher> { + + private final MediaType accepts; + + public HeaderAccepts(MediaType accepts) { + this.accepts = accepts; + } + + @Override + public boolean matches(HttpEntity argument) { + return argument.getHeaders().getAccept().contains(accepts); + } + + } + private class MockedDefaultContainerImageMetadataResolver extends DefaultContainerImageMetadataResolver { public MockedDefaultContainerImageMetadataResolver(ContainerRegistryService containerRegistryService) { super(containerRegistryService); diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java index 37837b6678..ee8f938bf1 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java @@ -21,8 +21,9 @@ import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -37,11 +38,10 @@ import org.springframework.web.client.RestTemplate; import static org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; + import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; @@ -58,10 +58,10 @@ public class DockerConfigJsonSecretToRegistryConfigurationConverterTest { private DockerConfigJsonSecretToRegistryConfigurationConverter converter; - @Before + @BeforeEach public void init() { MockitoAnnotations.initMocks(this); - when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean())).thenReturn(mockRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean(), anyMap())).thenReturn(mockRestTemplate); converter = new DockerConfigJsonSecretToRegistryConfigurationConverter(new ContainerRegistryProperties(), containerImageRestTemplateFactory); } @@ -75,15 +75,15 @@ public void testConvertAnonymousRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{}}}"; Map result = 
converter.convert(b); - assertThat(result.size(), is(1)); + assertThat(result).hasSize(1); assertThat(result.containsKey("demo.repository.io")).isTrue(); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), nullValue()); - assertThat(registryConfiguration.getSecret(), nullValue()); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.anonymous)); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isNull(); + assertThat(registryConfiguration.getSecret()).isNull(); + assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.anonymous); } @Test @@ -96,15 +96,15 @@ public void testConvertBasicAuthRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertThat(result.containsKey("demo.repository.io")).isTrue(); + assertThat(result).hasSize(1); + assertThat(result).containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), is("testuser")); - assertThat(registryConfiguration.getSecret(), is("testpassword")); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.basicauth)); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isEqualTo("testuser"); + assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword"); + assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.basicauth); } @Test @@ -121,17 +121,16 @@ public void testConvertDockerHubRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertThat(result.containsKey("demo.repository.io")).isTrue(); + assertThat(result).hasSize(1); + assertThat(result).containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), is("testuser")); - assertThat(registryConfiguration.getSecret(), is("testpassword")); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2)); - assertThat(registryConfiguration.getExtra().get("registryAuthUri"), - is("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull")); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isEqualTo("testuser"); + assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword"); + 
assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); + assertThat(registryConfiguration.getExtra().get("registryAuthUri")).isEqualTo("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"); } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java index 0bcc0b49b7..b477ac3536 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java @@ -19,9 +19,10 @@ import java.util.Collections; import java.util.Map; -import org.junit.After; -import org.junit.ClassRule; -import org.junit.Test; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import org.springframework.boot.test.autoconfigure.web.client.AutoConfigureWebClient; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolverAutoConfiguration; @@ -42,13 +43,13 @@ */ public class DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest { - @ClassRule + @RegisterExtension public final static S3SignedRedirectRequestServerResource s3SignedRedirectRequestServerResource = new S3SignedRedirectRequestServerResource(); private AnnotationConfigApplicationContext context; - @After + @AfterEach public void clean() { if (context != null) { context.close(); diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java index 0ff2c01608..4401ec4f0e 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java @@ -16,7 +16,10 @@ package org.springframework.cloud.dataflow.container.registry.authorization; -import org.junit.rules.ExternalResource; + +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,7 +31,7 @@ /** * @author Adam J. 
Weigold */ -public class S3SignedRedirectRequestServerResource extends ExternalResource { +public class S3SignedRedirectRequestServerResource implements BeforeEachCallback, AfterEachCallback { private static final Logger logger = LoggerFactory.getLogger(S3SignedRedirectRequestServerResource.class); @@ -36,12 +39,9 @@ public class S3SignedRedirectRequestServerResource extends ExternalResource { private ConfigurableApplicationContext application; - public S3SignedRedirectRequestServerResource() { - super(); - } - @Override - protected void before() throws Throwable { + @Override + public void beforeEach(ExtensionContext context) throws Exception { this.s3SignedRedirectServerPort = SocketUtils.findAvailableTcpPort(); @@ -56,9 +56,9 @@ protected void before() throws Throwable { logger.info("S3 Signed Redirect Server is UP!"); } - @Override - protected void after() { - application.stop(); + @Override + public void afterEach(ExtensionContext context) throws Exception { + application.stop(); } public int getS3SignedRedirectServerPort() { diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata-whitelist.properties b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata-whitelist.properties deleted file mode 100644 index 7cd3dbb2a4..0000000000 --- a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata-whitelist.properties +++ /dev/null @@ -1,2 +0,0 @@ -configuration-properties.classes=foo.bar.FilterProperties -configuration-properties.names=some.other.property.included.prefix.expresso2 diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata.json b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata.json deleted file mode 100644 index 21d7839a7a..0000000000 --- a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "groups": [ - { - "name": "filter", - "type": "foo.bar.FilterProperties", - "sourceType": "foo.bar.FilterProperties" - } - ], - "properties": [ - { - "name": "filter.expression", - "type": "org.springframework.expression.Expression", - "description": "A predicate to evaluate", - "sourceType": "foo.bar.FilterProperties", - "defaultValue": "true" - }, - { - "name": "some.other.property.included.prefix.expresso2", - "type": "org.springframework.cloud.dataflow.completion.Expresso", - "description": "A property of type enum and whose name starts like 'expression'", - "sourceType": "com.acme.SomeDifferentProperties" - }, - { - "name": "some.prefix.hidden.by.default.secret", - "type": "java.lang.String", - "description": "Some hidden option", - "sourceType": "com.acme.OtherProperties" - } - ], - "hints": [] -} diff --git a/spring-cloud-dataflow-container-registry/pom.xml b/spring-cloud-dataflow-container-registry/pom.xml
index e52b3ece13..afdfc5461e 100644 --- a/spring-cloud-dataflow-container-registry/pom.xml +++ b/spring-cloud-dataflow-container-registry/pom.xml @@ -4,11 +4,17 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-container-registry + spring-cloud-dataflow-container-registry + Spring Cloud Data Flow Container Registry + UTF-8 + true + 3.4.1 @@ -63,4 +69,45 @@ test + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java index ec9e405428..22761ece42 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java @@ -96,6 +96,7 @@ enum RepositoryReferenceType {tag, digest, unknown} /** * Helper method that returns the full Registry host address (host:port) + * @return The registry host address (host:port). */ public String getRegistryHost() { return this.hostname + (StringUtils.hasText(this.port) ? ":" + this.port : ""); @@ -103,6 +104,7 @@ public String getRegistryHost() { /** * Helper method that returns the full Repository name (e.g. namespace/registryName) without the tag or digest. + * @return The repository name. */ public String getRepository() { String ns = StringUtils.hasText(this.repositoryNamespace) ?
this.repositoryNamespace + "/" : ""; diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java index c3b680ddc4..2fc476bd6a 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java @@ -20,6 +20,8 @@ import java.security.NoSuchAlgorithmException; import java.security.cert.X509Certificate; import java.util.ArrayList; +import java.util.Collections; +import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; @@ -73,6 +75,7 @@ * * * @author Christian Tzolov + * @author Cheng Guan Poh */ public class ContainerImageRestTemplateFactory { @@ -124,10 +127,14 @@ public ContainerImageRestTemplateFactory(RestTemplateBuilder restTemplateBuilder } public RestTemplate getContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy) { + return this.getContainerRestTemplate(skipSslVerification, withHttpProxy, Collections.emptyMap()); + } + + public RestTemplate getContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy, Map extra) { try { CacheKey cacheKey = CacheKey.of(skipSslVerification, withHttpProxy); if (!this.restTemplateCache.containsKey(cacheKey)) { - RestTemplate restTemplate = createContainerRestTemplate(skipSslVerification, withHttpProxy); + RestTemplate restTemplate = createContainerRestTemplate(skipSslVerification, withHttpProxy, extra); this.restTemplateCache.putIfAbsent(cacheKey, restTemplate); } return this.restTemplateCache.get(cacheKey); @@ -139,12 +146,12 @@ public RestTemplate getContainerRestTemplate(boolean skipSslVerification, boolea } } - private RestTemplate createContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy) + private RestTemplate createContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy, Map extra) throws NoSuchAlgorithmException, KeyManagementException { if (!skipSslVerification) { // Create a RestTemplate that uses custom request factory - return this.initRestTemplate(HttpClients.custom(), withHttpProxy); + return this.initRestTemplate(HttpClients.custom(), withHttpProxy, extra); } // Trust manager that blindly trusts all SSL certificates. 
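A note on the caching in the getContainerRestTemplate hunk above: the lookup key remains CacheKey.of(skipSslVerification, withHttpProxy), so the new extra map is not part of the key and the first map supplied for a given flag pair determines the template that stays cached. A minimal sketch of that shape, using hypothetical stand-in names rather than the project's real types:

	import java.util.Map;
	import java.util.concurrent.ConcurrentHashMap;

	class TemplateCacheSketch {

		private final Map<String, Object> cache = new ConcurrentHashMap<>();

		// Key mirrors CacheKey.of(skipSsl, withProxy); 'extra' is deliberately absent from it.
		Object getTemplate(boolean skipSsl, boolean withProxy, Map<String, String> extra) {
			return cache.computeIfAbsent(skipSsl + "|" + withProxy,
					key -> createTemplate(skipSsl, withProxy, extra));
		}

		private Object createTemplate(boolean skipSsl, boolean withProxy, Map<String, String> extra) {
			return new Object(); // stand-in for the RestTemplate construction shown in the hunks above
		}
	}
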
@@ -170,10 +177,11 @@ public void checkServerTrusted(java.security.cert.X509Certificate[] certs, Strin HttpClients.custom() .setSSLContext(sslContext) .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE), - withHttpProxy); + withHttpProxy, + extra); } - private RestTemplate initRestTemplate(HttpClientBuilder clientBuilder, boolean withHttpProxy) + private RestTemplate initRestTemplate(HttpClientBuilder clientBuilder, boolean withHttpProxy, Map extra) { clientBuilder.setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build()); @@ -189,7 +197,7 @@ private RestTemplate initRestTemplate(HttpClientBuilder clientBuilder, boolean w HttpComponentsClientHttpRequestFactory customRequestFactory = new HttpComponentsClientHttpRequestFactory( clientBuilder - .setRedirectStrategy(new DropAuthorizationHeaderRequestRedirectStrategy()) + .setRedirectStrategy(new DropAuthorizationHeaderRequestRedirectStrategy(extra)) // Azure redirects may contain double slashes and by default those are normalised .setDefaultRequestConfig(RequestConfig.custom().setNormalizeUri(false).build()) .build()); diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java index d332ab512e..6cb7450861 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java @@ -23,38 +23,38 @@ /** * Configurations specific for each target Container Registry provider/instance. - * + *

* The Docker Hub configuration is set by default. Additional registries can be configured through the * {@link ContainerRegistryProperties#getRegistryConfigurations()} properties like this: * - * - * Configure Arifactory/JFrog private container registry: - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].registry-host=springsource-docker-private-local.jfrog.io - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].authorization-type=basicauth - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].user=[artifactory user] - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].secret=[artifactory encryptedkey] - * - * Configure Amazon ECR private registry: - spring.cloud.dataflow.container.registry-configurations[myamazonaws].registry-host=283191309520.dkr.ecr.us-west-1.amazonaws.com - spring.cloud.dataflow.container.registry-configurations[myamazonaws].authorization-type=awsecr - spring.cloud.dataflow.container.registry-configurations[myamazonaws].user=[your AWS accessKey] - spring.cloud.dataflow.container.registry-configurations[myamazonaws].secret=[your AWS secretKey] - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[region]=us-west-1 - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[registryIds]=283191309520 - * - * Configure Azure private container registry - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].registry-host=tzolovazureregistry.azurecr.io - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].authorization-type=basicauth - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].user=[your Azure registry username] - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].secret=[your Azure registry access password] - * - * Harbor Registry. Same as DockerHub but with different registryAuthUri - spring.cloud.dataflow.container.registry-configurations[harbor].registry-host=demo.goharbor.io - spring.cloud.dataflow.container.registry-configurations[harbor].authorization-type=dockeroauth2 - spring.cloud.dataflow.container.registry-configurations[harbor].user=admin - spring.cloud.dataflow.container.registry-configurations[harbor].secret=Harbor12345 - spring.cloud.dataflow.container.registry-configurations[harbor].extra[registryAuthUri]=https://demo.goharbor.io/service/token?service=harbor-registry&scope=repository:{repository}:pull - + * {@code + * Configure Artifactory/JFrog private container registry: + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].registry-host=springsource-docker-private-local.jfrog.io + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].authorization-type=basicauth + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].user=[artifactory user] + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].secret=[artifactory encryptedkey] + *

+ * Configure Amazon ECR private registry: + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].registry-host=283191309520.dkr.ecr.us-west-1.amazonaws.com + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].authorization-type=awsecr + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].user=[your AWS accessKey] + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].secret=[your AWS secretKey] + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[region]=us-west-1 + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[registryIds]=283191309520 + *

+ * Configure Azure private container registry + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].registry-host=tzolovazureregistry.azurecr.io + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].authorization-type=basicauth + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].user=[your Azure registry username] + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].secret=[your Azure registry access password] + *

+ * Harbor Registry. Same as DockerHub but with different registryAuthUri + * - spring.cloud.dataflow.container.registry-configurations[harbor].registry-host=demo.goharbor.io + * - spring.cloud.dataflow.container.registry-configurations[harbor].authorization-type=dockeroauth2 + * - spring.cloud.dataflow.container.registry-configurations[harbor].user=admin + * - spring.cloud.dataflow.container.registry-configurations[harbor].secret=Harbor12345 + * - spring.cloud.dataflow.container.registry-configurations[harbor].extra[registryAuthUri]=https://demo.goharbor.io/service/token?service=harbor-registry&scope=repository:repository-name:pull: + * } * * @author Christian Tzolov */ @@ -92,7 +92,7 @@ public enum AuthorizationType { /** * Container Registry Host (and optional port). Must be unique per registry. - * + *

* Used as a key to map a container image to target registry where it is stored! */ private String registryHost; @@ -102,6 +102,7 @@ public enum AuthorizationType { * (determined by the {@link #authorizationType}) to authorize the registry access. */ private String user; + private String secret; /** @@ -197,14 +198,14 @@ public void setUseHttpProxy(boolean useHttpProxy) { @Override public String toString() { return "ContainerRegistryConfiguration{" + - "registryHost='" + registryHost + '\'' + - ", user='" + user + '\'' + - ", secret='****'" + '\'' + - ", authorizationType=" + authorizationType + - ", manifestMediaType='" + manifestMediaType + '\'' + - ", disableSslVerification='" + disableSslVerification + '\'' - +", useHttpProxy='" + useHttpProxy + '\'' + - ", extra=" + extra + - '}'; + "registryHost='" + registryHost + '\'' + + ", user='" + user + '\'' + + ", secret='****'" + '\'' + + ", authorizationType=" + authorizationType + + ", manifestMediaType='" + manifestMediaType + '\'' + + ", disableSslVerification='" + disableSslVerification + '\'' + + ", useHttpProxy='" + useHttpProxy + '\'' + + ", extra=" + extra + + '}'; } } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java index 3105c7f0ff..6a0a4cd61f 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java @@ -109,14 +109,16 @@ public List getTags(String registryName, String repositoryName) { .build().expand(repositoryName); RestTemplate requestRestTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate( - containerRegistryConfiguration.isDisableSslVerification(), containerRegistryConfiguration.isUseHttpProxy()); + containerRegistryConfiguration.isDisableSslVerification(), + containerRegistryConfiguration.isUseHttpProxy(), + containerRegistryConfiguration.getExtra()); ResponseEntity manifest = requestRestTemplate.exchange(manifestUriComponents.toUri(), HttpMethod.GET, new HttpEntity<>(httpHeaders), Map.class); return (List) manifest.getBody().get(TAGS_FIELD); } catch (Exception e) { - logger.error(String.format("Exception getting tag information for the %s from %s", repositoryName, registryName)); + logger.error("Exception getting tag information for the {} from {}", repositoryName, registryName); } return null; } @@ -145,14 +147,16 @@ public Map getRepositories(String registryName) { RestTemplate requestRestTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate( - containerRegistryConfiguration.isDisableSslVerification(), containerRegistryConfiguration.isUseHttpProxy()); + containerRegistryConfiguration.isDisableSslVerification(), + containerRegistryConfiguration.isUseHttpProxy(), + containerRegistryConfiguration.getExtra()); ResponseEntity manifest = requestRestTemplate.exchange(manifestUriComponents.toUri(), HttpMethod.GET, new HttpEntity<>(httpHeaders), Map.class); return manifest.getBody(); } catch (Exception e) { - logger.error(String.format("Exception getting repositories from %s", registryName)); + logger.error("Exception getting repositories from {}", registryName); } return null; } @@ -184,7 +188,7 @@ public
ContainerRegistryRequest getRegistryRequest(String imageName) { } RestTemplate requestRestTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate( - registryConf.isDisableSslVerification(), registryConf.isUseHttpProxy()); + registryConf.isDisableSslVerification(), registryConf.isUseHttpProxy(), registryConf.getExtra()); return new ContainerRegistryRequest(containerImage, registryConf, authHttpHeaders, requestRestTemplate); } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java index cbacdf57c3..710913c7c9 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.container.registry.authorization; +import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -43,43 +44,46 @@ /** * @author Christian Tzolov + * @author Corneil du Plessis */ public class DockerConfigJsonSecretToRegistryConfigurationConverter implements Converter> { private static final Logger logger = LoggerFactory.getLogger(DockerConfigJsonSecretToRegistryConfigurationConverter.class); + public static final String BEARER_REALM_ATTRIBUTE = "Bearer realm"; + public static final String SERVICE_ATTRIBUTE = "service"; + public static final String HTTPS_INDEX_DOCKER_IO_V_1 = "https://index.docker.io/v1/"; + public static final String DOCKER_IO = "docker.io"; + public static final String REGISTRY_1_DOCKER_IO = "registry-1.docker.io"; - // private final RestTemplate restTemplate; - private final ContainerImageRestTemplateFactory containerImageRestTemplate; + private final ContainerImageRestTemplateFactory containerImageRestTemplateFactory; private final Map httpProxyPerHost; private final boolean replaceDefaultDockerRegistryServer; - public DockerConfigJsonSecretToRegistryConfigurationConverter(ContainerRegistryProperties properties, - ContainerImageRestTemplateFactory containerImageRestTemplate) { - + public DockerConfigJsonSecretToRegistryConfigurationConverter( + ContainerRegistryProperties properties, + ContainerImageRestTemplateFactory containerImageRestTemplateFactory) { this.replaceDefaultDockerRegistryServer = properties.isReplaceDefaultDockerRegistryServer(); - // Retrieve registry configurations, explicitly declared via properties. 
this.httpProxyPerHost = properties.getRegistryConfigurations().entrySet().stream() - .collect(Collectors.toMap(e -> e.getValue().getRegistryHost(), e -> e.getValue().isUseHttpProxy())); - this.containerImageRestTemplate = containerImageRestTemplate; + .collect(Collectors.toMap(e -> e.getValue().getRegistryHost(), e -> e.getValue().isUseHttpProxy())); + this.containerImageRestTemplateFactory = containerImageRestTemplateFactory; } /** * The .dockerconfigjson value has the following format: * - * {"auths":{"demo.goharbor.io":{"username":"admin","password":"Harbor12345","auth":"YWRtaW46SGFyYm9yMTIzNDU="}}} + * {"auths":{"demo.goharbor.io":{"username":"admin","password":"Harbor12345","auth":"YWRtaW46SGFyYm9yMTIzNDU="}}} * - * + *

* The map key is the registry host name and the value contains the username and password to access this registry. * * @param dockerconfigjson to convert into RegistryConfiguration map. - * * @return Return as (host-name, registry-configuration) map constructed from the dockerconfigjson content. */ @Override @@ -98,17 +102,15 @@ public Map convert(String dockerconfigjs rc.setSecret((String) registryMap.get("password")); Optional tokenAccessUrl = getDockerTokenServiceUri(rc.getRegistryHost(), - true, this.httpProxyPerHost.getOrDefault(rc.getRegistryHost(), false)); + true, this.httpProxyPerHost.getOrDefault(rc.getRegistryHost(), false)); if (tokenAccessUrl.isPresent()) { rc.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); rc.getExtra().put(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, tokenAccessUrl.get()); - } - else { + } else { if (StringUtils.isEmpty(rc.getUser()) && StringUtils.isEmpty(rc.getSecret())) { rc.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.anonymous); - } - else { + } else { rc.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.basicauth); } } @@ -118,8 +120,7 @@ public Map convert(String dockerconfigjs registryConfigurationMap.put(rc.getRegistryHost(), rc); } return registryConfigurationMap; - } - catch (Exception e) { + } catch (Exception e) { logger.error("Failed to parse the Secrets in dockerconfigjson"); } } @@ -134,7 +135,7 @@ public Map convert(String dockerconfigjs * To be able to reuse docker registry secrets for the purpose of imagePullSecrets and SCDF Container Metadata retrieval. * by default the `https://index.docker.io/v1/` and `domain.io` docker-server values found in any mounted dockerconfigjson secret * are replaced by `registry-1.docker.io`. - * + *

* You can override this behaviour by setting replaceDefaultDockerRegistryServer to false. * * @param dockerConfigJsonRegistryHost Docker-Server property value as extracted from the dockerconfigjson. @@ -142,40 +143,61 @@ public Map convert(String dockerconfigjs */ private String replaceDefaultDockerRegistryServerUrl(String dockerConfigJsonRegistryHost) { return (this.replaceDefaultDockerRegistryServer && (DOCKER_IO.equals(dockerConfigJsonRegistryHost) - || HTTPS_INDEX_DOCKER_IO_V_1.equals(dockerConfigJsonRegistryHost))) ? - REGISTRY_1_DOCKER_IO : dockerConfigJsonRegistryHost; + || HTTPS_INDEX_DOCKER_IO_V_1.equals(dockerConfigJsonRegistryHost))) ? + REGISTRY_1_DOCKER_IO : dockerConfigJsonRegistryHost; } /** * Best effort to construct a valid Docker OAuth2 token authorization uri from the HTTP 401 Error response. - * + *

* Hit the http://registry-host/v2/ and parse the authorization error (401) response. * If a Www-Authenticate response header exists and contains "Bearer realm" and "service" attributes then use * them to construct the Token Endpoint URI. - * + *

* Returns null for non 401 errors or invalid Www-Authenticate content. - * + *

* Applicable only for dockeroauth2 authorization-type. * * @param registryHost Container Registry host to retrieve the tokenServiceUri for. + * @param disableSSl Disable SSL + * @param useHttpProxy Enable the use of http proxy. * @return Returns Token Endpoint Url or null. */ public Optional getDockerTokenServiceUri(String registryHost, boolean disableSSl, boolean useHttpProxy) { try { - RestTemplate restTemplate = this.containerImageRestTemplate.getContainerRestTemplate(disableSSl, useHttpProxy); - restTemplate.exchange( - UriComponentsBuilder.newInstance().scheme("https").host(registryHost).path("v2/").build().toUri(), - HttpMethod.GET, new HttpEntity<>(new HttpHeaders()), Map.class); + RestTemplate restTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate(disableSSl, useHttpProxy, Collections.emptyMap()); + String host = registryHost; + Integer port = null; + if (registryHost.contains(":")) { + int colon = registryHost.lastIndexOf(":"); + String portString = registryHost.substring(colon + 1); + try { + int intPort = Integer.parseInt(portString); + if (Integer.toString(intPort).equals(portString) && intPort > 0 && intPort < 32767) { + port = intPort; + host = registryHost.substring(0, colon); + } + } catch (NumberFormatException x) { + // not valid integer + } + } + UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.newInstance().scheme("https").host(host); + if (port != null) { + uriComponentsBuilder.port(port); + } + uriComponentsBuilder.path("v2/"); + URI uri = uriComponentsBuilder.build().toUri(); + logger.info("getDockerTokenServiceUri:" + uri); + restTemplate.exchange(uri, HttpMethod.GET, new HttpEntity<>(new HttpHeaders()), Map.class); return Optional.empty(); - } - catch (HttpClientErrorException httpError) { + } catch (HttpClientErrorException httpError) { if (httpError.getRawStatusCode() != 401) { return Optional.empty(); } if (httpError.getResponseHeaders() == null - || !httpError.getResponseHeaders().containsKey(HttpHeaders.WWW_AUTHENTICATE)) { + || !httpError.getResponseHeaders().containsKey(HttpHeaders.WWW_AUTHENTICATE)) { return Optional.empty(); } @@ -188,18 +210,18 @@ public Optional getDockerTokenServiceUri(String registryHost, boolean di // Extract the "Bearer realm" and "service" attributes from the Www-Authenticate value Map wwwAuthenticateAttributes = Stream.of(wwwAuthenticate.get(0).split(",")) - .map(s -> s.split("=")) - .collect(Collectors.toMap(b -> b[0], b -> b[1])); + .map(s -> s.split("=")) + .collect(Collectors.toMap(b -> b[0], b -> b[1])); if (CollectionUtils.isEmpty(wwwAuthenticateAttributes) - || !wwwAuthenticateAttributes.containsKey(BEARER_REALM_ATTRIBUTE) - || !wwwAuthenticateAttributes.containsKey(SERVICE_ATTRIBUTE)) { + || !wwwAuthenticateAttributes.containsKey(BEARER_REALM_ATTRIBUTE) + || !wwwAuthenticateAttributes.containsKey(SERVICE_ATTRIBUTE)) { logger.warn("Invalid Www-Authenticate: {} for container registry {}", wwwAuthenticate, registryHost); return Optional.empty(); } String tokenServiceUri = String.format("%s?service=%s&scope=repository:{repository}:pull", - wwwAuthenticateAttributes.get(BEARER_REALM_ATTRIBUTE), wwwAuthenticateAttributes.get(SERVICE_ATTRIBUTE)); + wwwAuthenticateAttributes.get(BEARER_REALM_ATTRIBUTE), wwwAuthenticateAttributes.get(SERVICE_ATTRIBUTE)); // remove redundant quotes. 
tokenServiceUri = tokenServiceUri.replaceAll("\"", ""); @@ -207,8 +229,9 @@ public Optional getDockerTokenServiceUri(String registryHost, boolean di logger.info("tokenServiceUri: " + tokenServiceUri); return Optional.of(tokenServiceUri); - } - catch (Exception e) { + } catch (Exception e) { + // Log error because we cannot change the contract that returns empty optional. + logger.error("Ignoring:" + e, e); return Optional.empty(); } } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java index 1fbe38d823..3d2b9f02bf 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java @@ -122,7 +122,9 @@ public HttpHeaders getAuthorizationHeaders(ContainerImage containerImage, Contai } private RestTemplate getRestTemplate(ContainerRegistryConfiguration registryConfiguration) { - return this.containerImageRestTemplate.getContainerRestTemplate(registryConfiguration.isDisableSslVerification(), - registryConfiguration.isUseHttpProxy()); + return this.containerImageRestTemplate.getContainerRestTemplate( + registryConfiguration.isDisableSslVerification(), + registryConfiguration.isUseHttpProxy(), + registryConfiguration.getExtra()); } } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderRequestRedirectStrategy.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderRequestRedirectStrategy.java index 49b49d497b..c8c952d9cc 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderRequestRedirectStrategy.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderRequestRedirectStrategy.java @@ -18,8 +18,8 @@ import java.net.URI; import java.util.Arrays; +import java.util.Map; -import org.apache.commons.lang3.StringUtils; import org.apache.http.Header; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; @@ -31,8 +31,10 @@ import org.apache.http.impl.client.DefaultRedirectStrategy; import org.apache.http.protocol.HttpContext; +import org.springframework.util.StringUtils; + /** - * Both Amazon and Azure Container Registry services require special treatment for the Authorization headers when the + * Amazon, Azure and Custom Container Registry services require special treatment for the Authorization headers when the * HTTP request are forwarded to 3rd party services. * * Amazon: @@ -53,12 +55,18 @@ * Azure have same type of issues as S3 so header needs to be dropped as well. 
* (https://docs.microsoft.com/en-us/azure/container-registry/container-registry-faq#authentication-information-is-not-given-in-the-correct-format-on-direct-rest-api-calls) * + * Custom: + * Custom Container Registry services may have the same type of issues as S3, so the header needs to be dropped as well. + * * @author Adam J. Weigold * @author Janne Valkealahti * @author Christian Tzolov + * @author Cheng Guan Poh */ public class DropAuthorizationHeaderRequestRedirectStrategy extends DefaultRedirectStrategy { + private static final String CUSTOM_REGISTRY = "custom-registry"; + private static final String AMZ_CREDENTIAL = "X-Amz-Credential"; private static final String AUTHORIZATION_HEADER = "Authorization"; @@ -67,42 +75,59 @@ public class DropAuthorizationHeaderRequestRedirectStrategy extends DefaultRedir private static final String BASIC_AUTH = "Basic"; + /** + * Additional registry-specific configuration properties - usually used inside the Registry authorizer + * implementations (e.g. the AwsEcrAuthorizer implementation). + */ + private Map extra; + + public DropAuthorizationHeaderRequestRedirectStrategy(Map extra) { + this.extra = extra; + } + @Override public HttpUriRequest getRedirect(final HttpRequest request, final HttpResponse response, final HttpContext context) throws ProtocolException { HttpUriRequest httpUriRequest = super.getRedirect(request, response, context); + String query = httpUriRequest.getURI().getQuery(); + String method = request.getRequestLine().getMethod(); // Handle Amazon requests - final String query = httpUriRequest.getURI().getQuery(); - - if (StringUtils.isNoneEmpty(query) && query.contains(AMZ_CREDENTIAL)) { - final String method = request.getRequestLine().getMethod(); - if (StringUtils.isNoneEmpty(method) - && (method.equalsIgnoreCase(HttpHead.METHOD_NAME) || method.equalsIgnoreCase(HttpGet.METHOD_NAME))) { + if (StringUtils.hasText(query) && query.contains(AMZ_CREDENTIAL)) { + if (isHeadOrGetMethod(method)) { return new DropAuthorizationHeaderHttpRequestBase(httpUriRequest.getURI(), method); } } // Handle Azure requests if (request.getRequestLine().getUri().contains(AZURECR_URI_SUFFIX)) { - final String method = request.getRequestLine().getMethod(); - if (StringUtils.isNoneEmpty(method) - && (method.equalsIgnoreCase(HttpHead.METHOD_NAME) || method.equalsIgnoreCase(HttpGet.METHOD_NAME))) { + if (isHeadOrGetMethod(method)) { return new DropAuthorizationHeaderHttpRequestBase(httpUriRequest.getURI(), method) { - // drop headers only for the Basic Auth and leve them unchanged for OAuth2! + // Drop headers only for the Basic Auth and leave unchanged for OAuth2 @Override protected boolean isDropHeader(String name, String value) { - return name.equalsIgnoreCase(AUTHORIZATION_HEADER) && StringUtils.isNoneEmpty(value) - && value.contains(BASIC_AUTH); + return name.equalsIgnoreCase(AUTHORIZATION_HEADER) && StringUtils.hasText(value) && value.contains(BASIC_AUTH); } }; } } + // Handle Custom requests + if (extra.containsKey(CUSTOM_REGISTRY) && request.getRequestLine().getUri().contains(extra.get(CUSTOM_REGISTRY))) { + if (isHeadOrGetMethod(method)) { + return new DropAuthorizationHeaderHttpRequestBase(httpUriRequest.getURI(), method); + } + } + return httpUriRequest; } + private boolean isHeadOrGetMethod(String method) { + return StringUtils.hasText(method) + && (method.equalsIgnoreCase(HttpHead.METHOD_NAME) || method.equalsIgnoreCase(HttpGet.METHOD_NAME)); + } + /** * Overrides all header setter methods to filter out the Authorization headers.
*/ diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java index b3fdb7249a..03899c135c 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java @@ -50,6 +50,7 @@ public interface RegistryAuthorizer { /** * @param registryConfiguration configuration such as credentials and additional information required to obtain the * authorized headers. + * @param configProperties configuration properties for obtaining the authorized headers. * @return Returns HTTP headers, configured with authorization credentials or tokens that would allow access * the target Registry. */ diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java index bc14230c66..159872e5db 100644 --- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java +++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java @@ -20,23 +20,23 @@ import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; /** * @author Christian Tzolov */ public class ContainerImageParserTests { - private ContainerImageParser containerImageNameParser = + private final ContainerImageParser containerImageNameParser = new ContainerImageParser("test-domain.io", "tag654", "official-repo-name"); @Test public void testParseWithoutDefaults2() { ContainerImage containerImageName = - containerImageNameParser.parse("dev.registry.pivotal.io/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734"); + containerImageNameParser.parse("dev.registry.tanzu.vmware.com/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734"); - assertThat(containerImageName.getHostname()).isEqualTo("dev.registry.pivotal.io"); + assertThat(containerImageName.getHostname()).isEqualTo("dev.registry.tanzu.vmware.com"); assertThat(containerImageName.getRepositoryNamespace()).isEqualTo("p-scdf-for-kubernetes"); assertThat(containerImageName.getRepositoryName()).isEqualTo("spring-cloud-dataflow-composed-task-runner"); assertThat(containerImageName.getRepositoryTag()).isNull(); @@ -44,10 +44,10 @@ public void testParseWithoutDefaults2() { assertThat(containerImageName.getRepositoryDigest()).isEqualTo("sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734"); assertThat(containerImageName.getRepositoryReferenceType()).isEqualTo(ContainerImage.RepositoryReferenceType.digest); - 
assertThat(containerImageName.getRegistryHost()).isEqualTo("dev.registry.pivotal.io"); + assertThat(containerImageName.getRegistryHost()).isEqualTo("dev.registry.tanzu.vmware.com"); assertThat(containerImageName.getRepository()).isEqualTo("p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner"); - assertThat(containerImageName.getCanonicalName()).isEqualTo("dev.registry.pivotal.io/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734"); + assertThat(containerImageName.getCanonicalName()).isEqualTo("dev.registry.tanzu.vmware.com/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734"); } @Test @@ -105,13 +105,13 @@ public void testParseWithDefaults() { @Test public void testInvalidRegistryHostName() { - assertThrows(IllegalArgumentException.class, () -> + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> containerImageNameParser.parse("6666#.6:80/scdf/spring-image:123")); } @Test public void testInvalidRegistryPart() { - assertThrows(IllegalArgumentException.class, () -> + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> containerImageNameParser.parse("localhost:80bla/scdf/spring-image:123")); } } diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java index 7d7a6b79ac..0a991dca62 100644 --- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java +++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java @@ -19,7 +19,7 @@ import java.util.HashMap; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.runner.ApplicationContextRunner; diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java index 3aefc79430..bc4d2074e9 100644 --- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java +++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java @@ -21,8 +21,8 @@ import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -37,16 +37,16 @@ import org.springframework.web.client.RestTemplate; import static 
org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; + import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; /** * @author Christian Tzolov + * @author Corneil du Plessis */ public class DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest { @@ -58,10 +58,10 @@ public class DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest private DockerConfigJsonSecretToRegistryConfigurationConverter converter; - @Before + @BeforeEach public void init() { MockitoAnnotations.initMocks(this); - when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean())).thenReturn(mockRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean(), anyMap())).thenReturn(mockRestTemplate); converter = new DockerConfigJsonSecretToRegistryConfigurationConverter(new ContainerRegistryProperties(), containerImageRestTemplateFactory); } @@ -75,15 +75,15 @@ public void testConvertAnonymousRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertThat(result.containsKey("demo.repository.io")).isTrue(); + assertThat(result).hasSize(1); + assertThat(result).containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), nullValue()); - assertThat(registryConfiguration.getSecret(), nullValue()); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.anonymous)); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isNull(); + assertThat(registryConfiguration.getSecret()).isNull(); + assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.anonymous); } @Test @@ -96,15 +96,36 @@ public void testConvertBasicAuthRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertThat(result.containsKey("demo.repository.io")).isTrue(); + assertThat(result).hasSize(1); + assertThat(result).containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), is("testuser")); - assertThat(registryConfiguration.getSecret(), is("testpassword")); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.basicauth)); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isEqualTo("testuser"); + assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword"); + 
+		assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.basicauth);
+	}
+
+	@Test
+	public void testConvertWithPort() throws URISyntaxException {
+
+		when(mockRestTemplate.exchange(
+				eq(new URI("https://demo.repository.io/v2/_catalog")), eq(HttpMethod.GET), any(), eq(Map.class)))
+						.thenReturn(new ResponseEntity<>(new HashMap<>(), HttpStatus.OK));
+
+		String b = "{\"auths\":{\"demo.repository.io:5050\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}";
+		Map<String, ContainerRegistryConfiguration> result = converter.convert(b);
+
+		assertThat(result).hasSize(1);
+		assertThat(result).containsKey("demo.repository.io:5050");
+
+		ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io:5050");
+
+		assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io:5050");
+		assertThat(registryConfiguration.getUser()).isEqualTo("testuser");
+		assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword");
+		assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.basicauth);
 	}
 
 	@Test
@@ -121,17 +142,16 @@ public void testConvertDockerHubRegistry() throws URISyntaxException {
 		String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}";
 		Map<String, ContainerRegistryConfiguration> result = converter.convert(b);
 
-		assertThat(result.size(), is(1));
+		assertThat(result).hasSize(1);
 		assertThat(result.containsKey("demo.repository.io")).isTrue();
 
 		ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io");
 
-		assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io"));
-		assertThat(registryConfiguration.getUser(), is("testuser"));
-		assertThat(registryConfiguration.getSecret(), is("testpassword"));
-		assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2));
-		assertThat(registryConfiguration.getExtra().get("registryAuthUri"),
-				is("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"));
+		assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io");
+		assertThat(registryConfiguration.getUser()).isEqualTo("testuser");
+		assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword");
+		assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2);
+		assertThat(registryConfiguration.getExtra().get("registryAuthUri")).isEqualTo("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull");
 	}
diff --git a/spring-cloud-dataflow-core-dsl/pom.xml b/spring-cloud-dataflow-core-dsl/pom.xml
index 2ddbd88b86..406ccafabe 100644
--- a/spring-cloud-dataflow-core-dsl/pom.xml
+++ b/spring-cloud-dataflow-core-dsl/pom.xml
@@ -4,10 +4,18 @@
 	<parent>
 		<groupId>org.springframework.cloud</groupId>
 		<artifactId>spring-cloud-dataflow-parent</artifactId>
-		<version>2.9.2-SNAPSHOT</version>
+		<version>2.11.6-SNAPSHOT</version>
+		<relativePath>../spring-cloud-dataflow-parent</relativePath>
 	</parent>
 	<artifactId>spring-cloud-dataflow-core-dsl</artifactId>
+	<name>spring-cloud-dataflow-core-dsl</name>
+	<description>Spring Cloud Data Flow Core DSL</description>
 	<packaging>jar</packaging>
+	<properties>
+		true
+		junit-vintage
+		<maven-javadoc-plugin.version>3.4.1</maven-javadoc-plugin.version>
+	</properties>
 	<dependencies>
 		<dependency>
 			<groupId>org.springframework</groupId>
@@ -22,5 +30,47 @@
 			<artifactId>spring-boot-starter-test</artifactId>
 			<scope>test</scope>
 		</dependency>
+		<dependency>
+			<groupId>org.junit.jupiter</groupId>
+			<artifactId>junit-jupiter-api</artifactId>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.assertj</groupId>
+			<artifactId>assertj-core</artifactId>
+			<scope>test</scope>
+		</dependency>
 	</dependencies>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-javadoc-plugin</artifactId>
+				<version>${maven-javadoc-plugin.version}</version>
+				<executions>
+					<execution>
+						<id>javadoc</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-source-plugin</artifactId>
+				<version>3.3.0</version>
+				<executions>
+					<execution>
+						<id>source</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
 </project>
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java
index 092bb3bf44..6156fedc24 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java
@@ -315,6 +315,7 @@ protected String getTokenData(Token token) {
 	 * Expected format: {@code ':' identifier [ '.' identifier ]*}
 	 * <p>
 	 *
+	 * @param canDefault allows the user to peek ahead to parse a reference when working with colons in the syntax.
 	 * @return {@code DestinationNode} representing the destination reference
 	 */
 	protected DestinationNode eatDestinationReference(boolean canDefault) {
@@ -366,7 +367,7 @@ protected DestinationNode eatDestinationReference(boolean canDefault) {
 	 * <p>
 	 * Expected formats: {@code appList: app (| app)*} A stream may end in an app (if it is
 	 * a sink) or be followed by a sink destination.
-	 *
+	 * @param preceedingSourceChannelSpecified indicator to parser about state of stream.
 	 * @return a list of {@code AppNode}
 	 */
 	protected List<AppNode> eatAppList(boolean preceedingSourceChannelSpecified) {
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java
index 41db7e373c..1473184399 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java
@@ -20,7 +20,7 @@
  * Basic visitor pattern for a parsed task. Provide a concrete implementation to
  * participate in the visit and pass it to a parsed TaskNode. A simple task only has one
  * sequence, for example: {@code appA && appB && appC}. In this situation
- * preVisit(int) and postVisit(int) will only be called with 0. A more
+ * <code>preVisit(int)</code> and <code>postVisit(int)</code> will only be called with 0. A more
 * complex situation would be:
 *
 * <pre>
@@ -32,7 +32,7 @@
  *
  * This includes two sequences - as in two separate definitions. The primary definition
  * references other definitions where it would be too messy to inline them. In this case
- * preVisit(int) would be called for both 0 and 1.
+ * {@link #preVisit(FlowNode)} would be called.
  *
  * @author Andy Clement
  */
@@ -112,7 +112,7 @@ public void postVisit(TaskAppNode taskApp) {
 	}
 
 	/**
-	 * After visit(TaskAppNode) and before postVisit(TaskAppNode) the
+	 * After {@link #visit(TaskAppNode)} and before {@link #postVisit(TaskAppNode)} the
 	 * transitions (if there are any) are visited for that task app.
 	 *
 	 * @param transition the transition
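
The visitor contract documented above works by subclassing TaskVisitor's no-op callbacks and handing the instance to a parsed TaskNode, as the TestVisitor in TaskParserTests further down does. A minimal sketch, assuming the callback signatures suggested by the hunks above (AppNameCollector is a hypothetical example, not part of this patch):

    import java.util.ArrayList;
    import java.util.List;

    import org.springframework.cloud.dataflow.core.dsl.TaskAppNode;
    import org.springframework.cloud.dataflow.core.dsl.TaskVisitor;
    import org.springframework.cloud.dataflow.core.dsl.TransitionNode;

    // Hypothetical visitor: records the name of every task app in the definition.
    class AppNameCollector extends TaskVisitor {

        private final List<String> names = new ArrayList<>();

        @Override
        public void visit(TaskAppNode taskApp) {
            names.add(taskApp.getName());
        }

        @Override
        public void visit(TransitionNode transition) {
            // Runs after visit(TaskAppNode) and before postVisit(TaskAppNode),
            // once per transition declared on that app (see the Javadoc fix above).
        }

        public List<String> getNames() {
            return names;
        }
    }

Usage mirrors the tests in this patch: parse a definition with new TaskParser("demo", "appA && appB", true, true).parse(), then call accept(new AppNameCollector()) on the resulting TaskNode; the collector ends up holding [appA, appB].
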
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java
index 58f0558585..e92946b277 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java
@@ -18,12 +18,12 @@
 
 /**
  * An AST node representing a transition found in a parsed task specification. A
- * transition is expressed in the form "{@code STATE->TARGET}". If STATE is
+ * transition is expressed in the form "{@code STATE->TARGET}". If {@code STATE} is
  * unquoted it is considered a reference to the exit code of the preceding app (where
- * * means 'any exit code'). If STATE is quoted it is considered a
- * reference to the exit status of the preceding app (where '*' means 'any exit
- * status'). TARGET can be either a reference to a label, :foo, or a single app
- * name Foo.
+ * {@code *} means 'any exit code'). If {@code STATE} is quoted it is considered a
+ * reference to the exit status of the preceding app (where {@code '*'} means 'any exit
+ * status'). TARGET can be either a reference to a label, {@code :foo}, or a single app
+ * name {@code Foo}.
  *
  * @author Andy Clement
  */
@@ -134,8 +134,6 @@ public String getStatusToCheckInDSLForm() {
 	}
 
 	/**
-	 * The target is either an app or a reference. If it is an app then call
-	 * getTargetApp otherwise call getTargetReference.
 	 *
 	 * @return true if the target is an app
 	 */
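
The exit-code versus exit-status distinction restated in this Javadoc is observable through the parser API exercised by the tests below. A small sketch (TransitionDemo is illustrative only; the TaskParser constructor and accessors are the ones that appear in TaskParserTests):

    import java.util.List;

    import org.springframework.cloud.dataflow.core.dsl.TaskAppNode;
    import org.springframework.cloud.dataflow.core.dsl.TaskNode;
    import org.springframework.cloud.dataflow.core.dsl.TaskParser;
    import org.springframework.cloud.dataflow.core.dsl.TransitionNode;

    public class TransitionDemo {

        public static void main(String[] args) {
            // Unquoted 0 checks AAA's exit code; quoted 'FAILED' checks AAA's exit status.
            TaskNode node = new TaskParser("demo", "AAA 0->BBB 'FAILED'->CCC", true, true).parse();
            TaskAppNode app = (TaskAppNode) node.getStart().getSeriesElement(0);
            List<TransitionNode> transitions = app.getTransitions();
            System.out.println(transitions.get(0).isExitCodeCheck());  // true  (exit-code check)
            System.out.println(transitions.get(1).isExitCodeCheck());  // false (exit-status check)
            System.out.println(transitions.get(1).getTargetDslText()); // CCC
        }
    }
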
diff --git a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java
index ff25f306d9..e861fa1443 100644
--- a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java
+++ b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java
@@ -17,20 +17,24 @@
 
 import java.util.Collections;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
+
+import static org.assertj.core.api.Assertions.assertThat;
 
-import static org.junit.Assert.assertEquals;
 
 /**
  * @author Oleg Zhurakousky
  * @author Andy Clement
+ * @author Corneil du Plessis
  */
 public class NodeTests {
 
 	@Test
 	public void testDestinationNodeDestinationName(){
 		DestinationNode node = new DestinationNode(0, 0, "foo.bar.bazz", null);
-		assertEquals("foo.bar.bazz", node.getDestinationName());
+		assertThat(node.getDestinationName()).isEqualTo("foo.bar.bazz");
 	}
 
 	@Test
@@ -39,7 +43,7 @@ public void testDestinationNodeToString(){
 		ArgumentNode an2 = new ArgumentNode("abc", "'xyz'", 0, 4);
 		DestinationNode node = new DestinationNode(0, 4, "foo.bar.bazz", new ArgumentNode[]{an1, an2});
 		System.out.println(node.stringify());
-		assertEquals(":foo.bar.bazz", node.toString());
+		assertThat(node.toString()).isEqualTo(":foo.bar.bazz");
 	}
 
 	@Test // see https://github.com/spring-cloud/spring-cloud-dataflow/issues/1568
@@ -53,6 +57,6 @@ public void testStreamNodesToString(){
 		DestinationNode sinkDNode = new DestinationNode(0, 0, "sink.bar.bazz", null);
 		SinkDestinationNode sink = new SinkDestinationNode(sinkDNode, 4);
 		StreamNode sNode = new StreamNode(null, "myStream", Collections.singletonList(appNode), source, sink);
-		assertEquals("myStream = :source.bar.bazz > bar --foo=bar --abc='xyz' > :sink.bar.bazz", sNode.toString());
+		assertThat(sNode.toString()).isEqualTo("myStream = :source.bar.bazz > bar --foo=bar --abc='xyz' > :sink.bar.bazz");
 	}
 }
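
The NodeTests conversion above is the mechanical rule applied throughout this patch when moving from JUnit 4 (and Hamcrest) to AssertJ: the expected value moves out of the first argument position into a chained expectation on the actual value, so the argument order reverses. Representative before/after pairs in the style of this patch:

    // Before: JUnit 4 and Hamcrest put the expected value first.
    assertEquals("foo.bar.bazz", node.getDestinationName());
    assertThat(result.size(), is(1));

    // After: AssertJ starts from the actual value and chains the expectation.
    assertThat(node.getDestinationName()).isEqualTo("foo.bar.bazz");
    assertThat(result).hasSize(1);

Besides readability, the chained form removes the classic hazard of silently swapping expected and actual values in assertEquals failure messages.
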
diff --git a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java
index 0638067930..3e987316c7 100644
--- a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java
+++ b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java
@@ -22,19 +22,16 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 import org.springframework.cloud.dataflow.core.dsl.graph.Graph;
 import org.springframework.cloud.dataflow.core.dsl.graph.Link;
 import org.springframework.cloud.dataflow.core.dsl.graph.Node;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.fail;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
 
 /**
  * Test the parser and visitor infrastructure. Check it accepts expected data and
@@ -45,6 +42,7 @@
  * @author David Turanski
  * @author Michael Minella
  * @author Eric Bottard
+ * @author Corneil du Plessis
  */
 public class TaskParserTests {
 
@@ -55,40 +53,40 @@ public class TaskParserTests {
 	@Test
 	public void oneApp() {
 		TaskNode taskNode = parse("foo");
-		assertFalse(taskNode.isComposed());
+		assertThat(taskNode.isComposed()).isFalse();
 		TaskAppNode appNode = taskNode.getTaskApp();
-		assertEquals("foo", appNode.getName());
-		assertEquals(0, appNode.getArguments().length);
-		assertEquals(0, appNode.startPos);
-		assertEquals(3, appNode.endPos);
+		assertThat(appNode.getName()).isEqualTo("foo");
+		assertThat(appNode.getArguments().length).isEqualTo(0);
+		assertThat(appNode.startPos).isEqualTo(0);
+		assertThat(appNode.endPos).isEqualTo(3);
 	}
 
 	@Test
 	public void hyphenatedAppName() {
 		appNode = parse("gemfire-cq").getTaskApp();
-		assertEquals("gemfire-cq:0>10", appNode.stringify(true));
+		assertThat(appNode.stringify(true)).isEqualTo("gemfire-cq:0>10");
 	}
 
 	@Test
 	public void oneAppWithParam() {
 		appNode = parse("foo --name=value").getTaskApp();
-		assertEquals("foo --name=value:0>16", appNode.stringify(true));
+		assertThat(appNode.stringify(true)).isEqualTo("foo --name=value:0>16");
 	}
 
 	@Test
 	public void oneAppWithTwoParams() {
 		appNode = parse("foo --name=value --x=y").getTaskApp();
 
-		assertEquals("foo", appNode.getName());
+		assertThat(appNode.getName()).isEqualTo("foo");
 		ArgumentNode[] args = appNode.getArguments();
-		assertNotNull(args);
-		assertEquals(2, args.length);
-		assertEquals("name", args[0].getName());
-		assertEquals("value", args[0].getValue());
-		assertEquals("x", args[1].getName());
-		assertEquals("y", args[1].getValue());
+		assertThat(args).isNotNull();
+		assertThat(args.length).isEqualTo(2);
+		assertThat(args[0].getName()).isEqualTo("name");
+		assertThat(args[0].getValue()).isEqualTo("value");
+		assertThat(args[1].getName()).isEqualTo("x");
+		assertThat(args[1].getValue()).isEqualTo("y");
 
-		assertEquals("foo --name=value --x=y:0>22", appNode.stringify(true));
+		assertThat(appNode.stringify(true)).isEqualTo("foo --name=value --x=y:0>22");
 	}
 
 	@Test
@@ -96,31 +94,31 @@ public void testParameters() {
 		String module = "gemfire-cq --query='Select * from /Stocks where symbol=''VMW''' --regionName=foo --foo=bar";
 		TaskAppNode gemfireApp = parse(module).getTaskApp();
 		Map<String, String> parameters = gemfireApp.getArgumentsAsMap();
-		assertEquals(3, parameters.size());
-		assertEquals("Select * from /Stocks where symbol='VMW'", parameters.get("query"));
-		assertEquals("foo", parameters.get("regionName"));
-		assertEquals("bar", parameters.get("foo"));
+		assertThat(parameters.size()).isEqualTo(3);
+		assertThat(parameters.get("query")).isEqualTo("Select * from /Stocks where symbol='VMW'");
+		assertThat(parameters.get("regionName")).isEqualTo("foo");
+		assertThat(parameters.get("foo")).isEqualTo("bar");
 
 		module = "test";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(0, parameters.size());
+		assertThat(parameters.size()).isEqualTo(0);
 
 		module = "foo --x=1 --y=two ";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(2, parameters.size());
-		assertEquals("1", parameters.get("x"));
-		assertEquals("two", parameters.get("y"));
+		assertThat(parameters.size()).isEqualTo(2);
+		assertThat(parameters.get("x")).isEqualTo("1");
+		assertThat(parameters.get("y")).isEqualTo("two");
 
 		module = "foo --x=1a2b --y=two ";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(2, parameters.size());
-		assertEquals("1a2b", parameters.get("x"));
-		assertEquals("two", parameters.get("y"));
+		assertThat(parameters.size()).isEqualTo(2);
+		assertThat(parameters.get("x")).isEqualTo("1a2b");
+		assertThat(parameters.get("y")).isEqualTo("two");
 
 		module = "foo --x=2";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(1, parameters.size());
-		assertEquals("2", parameters.get("x"));
+		assertThat(parameters.size()).isEqualTo(1);
+		assertThat(parameters.get("x")).isEqualTo("2");
 
 		module = "--foo = bar";
 		try {
@@ -149,7 +147,7 @@ public void testInvalidApps() {
 	public void expressions_xd159() {
 		appNode = parse("transform --expression=--payload").getTaskApp();
 		Map<String, String> props = appNode.getArgumentsAsMap();
-		assertEquals("--payload", props.get("expression"));
+		assertThat(props.get("expression")).isEqualTo("--payload");
 	}
 
 	@Test
@@ -158,7 +156,7 @@ public void expressions_xd159_2() {
 		checkForParseError("transform --expression=new StringBuilder(payload).reverse()", DSLMessage.TASK_MORE_INPUT,
 				27);
 		appNode = parse("transform --expression='new StringBuilder(payload).reverse()'").getTaskApp();
-		assertEquals("new StringBuilder(payload).reverse()", appNode.getArgumentsAsMap().get("expression"));
+		assertThat(appNode.getArgumentsAsMap().get("expression")).isEqualTo("new StringBuilder(payload).reverse()");
 	}
 
 	@Test
@@ -178,17 +176,17 @@ public void ensureTaskNamesValid_xd1344() {
 	public void expressions_xd159_3() {
 		appNode = parse("transform --expression='new StringBuilder(payload).reverse()'").getTaskApp();
 		Map<String, String> props = appNode.getArgumentsAsMap();
-		assertEquals("new StringBuilder(payload).reverse()", props.get("expression"));
+		assertThat(props.get("expression")).isEqualTo("new StringBuilder(payload).reverse()");
 	}
 
 	@Test
 	public void expressions_xd159_4() {
 		appNode = parse("transform --expression=\"'Hello, world!'\"").getTaskApp();
 		Map<String, String> props = appNode.getArgumentsAsMap();
-		assertEquals("'Hello, world!'", props.get("expression"));
+		assertThat(props.get("expression")).isEqualTo("'Hello, world!'");
 		appNode = parse("transform --expression='''Hello, world!'''").getTaskApp();
 		props = appNode.getArgumentsAsMap();
-		assertEquals("'Hello, world!'", props.get("expression"));
+		assertThat(props.get("expression")).isEqualTo("'Hello, world!'");
 		// Prior to the change for XD-1613, this error should point to the comma:
 		// checkForParseError("foo | transform --expression=''Hello, world!'' | bar",
 		// DSLMessage.UNEXPECTED_DATA, 37);
@@ -200,21 +198,21 @@ public void expressions_xd159_4() {
 	public void expressions_gh1() {
 		appNode = parse("filter --expression=\"payload == 'foo'\"").getTaskApp();
 		Map<String, String> props = appNode.getArgumentsAsMap();
-		assertEquals("payload == 'foo'", props.get("expression"));
+		assertThat(props.get("expression")).isEqualTo("payload == 'foo'");
 	}
 
 	@Test
 	public void expressions_gh1_2() {
 		appNode = parse("filter --expression='new Foo()'").getTaskApp();
 		Map<String, String> props = appNode.getArgumentsAsMap();
-		assertEquals("new Foo()", props.get("expression"));
+		assertThat(props.get("expression")).isEqualTo("new Foo()");
 	}
 
 	@Test
 	public void errorCases01() {
 		checkForParseError(".", DSLMessage.EXPECTED_APPNAME, 0, ".");
-		assertEquals("a-_", parse("foo", "a-_", true).getTaskApp().getName());
-		assertEquals("a_b", parse("foo", "a_b", true).getTaskApp().getName());
+		assertThat(parse("foo", "a-_", true).getTaskApp().getName()).isEqualTo("a-_");
+		assertThat(parse("foo", "a_b", true).getTaskApp().getName()).isEqualTo("a_b");
 		checkForParseError(";", DSLMessage.EXPECTED_APPNAME, 0, ";");
 	}
 
@@ -272,10 +270,10 @@ public void testComposedOptionNameErros() {
 	@Test
 	public void testXD2416() {
 		appNode = parse("transform --expression='payload.replace(\"abc\", \"\")'").getTaskApp();
-		assertEquals(appNode.getArgumentsAsMap().get("expression"), "payload.replace(\"abc\", \"\")");
+		assertThat(appNode.getArgumentsAsMap().get("expression")).isEqualTo("payload.replace(\"abc\", \"\")");
 
 		appNode = parse("transform --expression='payload.replace(\"abc\", '''')'").getTaskApp();
-		assertEquals(appNode.getArgumentsAsMap().get("expression"), "payload.replace(\"abc\", '')");
+		assertThat(appNode.getArgumentsAsMap().get("expression")).isEqualTo("payload.replace(\"abc\", '')");
 	}
 
 	@Test
@@ -294,9 +292,9 @@ private void checkForIllegalTaskName(String taskName, String taskDef) {
 			fail("expected to fail but parsed " + appNode.stringify());
 		}
 		catch (ParseException e) {
-			assertEquals(DSLMessage.ILLEGAL_TASK_NAME, e.getMessageCode());
-			assertEquals(0, e.getPosition());
-			assertEquals(taskName, e.getInserts()[0]);
+			assertThat(e.getMessageCode()).isEqualTo(DSLMessage.ILLEGAL_TASK_NAME);
+			assertThat(e.getPosition()).isEqualTo(0);
+			assertThat(e.getInserts()[0]).isEqualTo(taskName);
 		}
 	}
 
@@ -304,101 +302,91 @@ private void checkForIllegalTaskName(String taskName, String taskDef) {
 	public void executableDsl() {
 		TaskNode ctn = parse("foo", "appA && appB", true);
 		List<TaskApp> taskApps = ctn.getTaskApps();
-		assertEquals("appA", taskApps.get(0).getName());
-		assertEquals("foo-appA", taskApps.get(0).getExecutableDSLName());
-		assertEquals("appB", taskApps.get(1).getName());
-		assertEquals("foo-appB", taskApps.get(1).getExecutableDSLName());
+		assertThat(taskApps.get(0).getName()).isEqualTo("appA");
+		assertThat(taskApps.get(0).getExecutableDSLName()).isEqualTo("foo-appA");
+		assertThat(taskApps.get(1).getName()).isEqualTo("appB");
+		assertThat(taskApps.get(1).getExecutableDSLName()).isEqualTo("foo-appB");
 
 		ctn = parse("bar", "appC && goo: appC", true);
 		taskApps = ctn.getTaskApps();
-		assertEquals("appC", taskApps.get(0).getName());
-		assertEquals("bar-appC", taskApps.get(0).getExecutableDSLName());
-		assertEquals("appC", taskApps.get(1).getName());
-		assertEquals("bar-goo", taskApps.get(1).getExecutableDSLName());
+		assertThat(taskApps.get(0).getName()).isEqualTo("appC");
+		assertThat(taskApps.get(0).getExecutableDSLName()).isEqualTo("bar-appC");
+		assertThat(taskApps.get(1).getName()).isEqualTo("appC");
+		assertThat(taskApps.get(1).getExecutableDSLName()).isEqualTo("bar-goo");
 
 		// flows
-		assertEquals("foo-appA", parse("foo", "appA", true).toExecutableDSL());
-		assertEquals("foo-appA && foo-appB", parse("foo", "appA && appB", true).toExecutableDSL());
-		assertEquals("foo-appA && foo-appB && foo-appC", parse("foo", "appA && appB && appC", true).toExecutableDSL());
+		assertThat(parse("foo", "appA", true).toExecutableDSL()).isEqualTo("foo-appA");
+		assertThat(parse("foo", "appA && appB", true).toExecutableDSL()).isEqualTo("foo-appA && foo-appB");
+		assertThat(parse("foo", "appA && appB && appC", true).toExecutableDSL()).isEqualTo("foo-appA && foo-appB && foo-appC");
 
 		assertTaskApps("foo", "appA", "foo-appA");
 		assertTaskApps("foo", "appA && appB", "foo-appA", "foo-appB");
 		assertTaskApps("foo", "appA && appB && appC", "foo-appA", "foo-appB", "foo-appC");
 
 		// arguments
-		assertEquals("foo-appA", parse("foo", "appA --p1=v1 --p2=v2", true).toExecutableDSL());
-		assertEquals("foo-appA && foo-appB", parse("foo", "appA --p2=v2 && appB --p3=v3", true).toExecutableDSL());
+		assertThat(parse("foo", "appA --p1=v1 --p2=v2", true).toExecutableDSL()).isEqualTo("foo-appA");
+		assertThat(parse("foo", "appA --p2=v2 && appB --p3=v3", true).toExecutableDSL()).isEqualTo("foo-appA && foo-appB");
 		assertTaskApps("foo", "appA --p1=v2", "foo-appA:p1=v2");
 		assertTaskApps("foo", "appA --p1=v2 && goo: appB --p2=v2", "foo-appA:p1=v2", "foo-goo:p2=v2");
 		assertTaskApps("foo", "appA 0->x:appA --p1=v1", "foo-appA", "foo-x:p1=v1");
 
 		// labels
-		assertEquals("bar-goo", parse("bar", "goo:appA", true).toExecutableDSL());
-		assertEquals("fo-aaa && fo-bbb", parse("fo", "aaa: appA && bbb: appA", true).toExecutableDSL());
+		assertThat(parse("bar", "goo:appA", true).toExecutableDSL()).isEqualTo("bar-goo");
+		assertThat(parse("fo", "aaa: appA && bbb: appA", true).toExecutableDSL()).isEqualTo("fo-aaa && fo-bbb");
 
 		assertTaskApps("bar", "goo:appA", "bar-goo");
 		assertTaskApps("bar", "appA && goo: appA", "bar-appA", "bar-goo");
 
 		// transitions
-		assertEquals("foo-appA 'c'->foo-appC && foo-appB",
-				parse("foo", "appA 'c'->appC && appB", true).toExecutableDSL());
-		assertEquals("foo-appA 'c'->foo-appC 'd'->foo-appD && foo-appB",
-				parse("foo", "appA 'c'->appC 'd'->appD && " + "appB", true).toExecutableDSL());
-		assertEquals("foo-appA 1->foo-appC 2->foo-appD && foo-appB",
-				parse("foo", "appA 1->appC 2->appD && appB", true).toExecutableDSL());
-		assertEquals("foo-aaa 1->foo-appC 2->:aaa", parse("foo", "aaa: appA 1->appC 2->:aaa", true).toExecutableDSL());
+		assertThat(parse("foo", "appA 'c'->appC && appB", true).toExecutableDSL()).isEqualTo("foo-appA 'c'->foo-appC && foo-appB");
+		assertThat(parse("foo", "appA 'c'->appC 'd'->appD && " + "appB", true).toExecutableDSL()).isEqualTo("foo-appA 'c'->foo-appC 'd'->foo-appD && foo-appB");
+		assertThat(parse("foo", "appA 1->appC 2->appD && appB", true).toExecutableDSL()).isEqualTo("foo-appA 1->foo-appC 2->foo-appD && foo-appB");
+		assertThat(parse("foo", "aaa: appA 1->appC 2->:aaa", true).toExecutableDSL()).isEqualTo("foo-aaa 1->foo-appC 2->:aaa");
 
 		// splits
-		assertEquals("", parse("foo", "", true).toExecutableDSL());
-		assertEquals("",
-				parse("foo", "", true).toExecutableDSL());
-		assertEquals("< || foo-appB>",
-				parse("foo", "< || " + "appB>", true).toExecutableDSL());
-		assertEquals("< || foo-appB>",
-				parse("foo", "< || appB>", true).toExecutableDSL());
+		assertThat(parse("foo", "", true).toExecutableDSL()).isEqualTo("");
+		assertThat(parse("foo", "", true).toExecutableDSL()).isEqualTo("");
+		assertThat(parse("foo", "< || " + "appB>", true).toExecutableDSL()).isEqualTo("< || foo-appB>");
+		assertThat(parse("foo", "< || appB>", true).toExecutableDSL()).isEqualTo("< || foo-appB>");
 
 		// splits and flows
-		assertEquals("foo-AAA && foo-FFF 'FAILED'->foo-EEE &&  && foo-DDD",
-				parse("foo", "AAA && " + "FFF 'FAILED' -> EEE &&  && DDD", true).toExecutableDSL());
+		assertThat(parse("foo", "AAA && " + "FFF 'FAILED' -> EEE &&  && DDD", true).toExecutableDSL()).isEqualTo("foo-AAA && foo-FFF 'FAILED'->foo-EEE &&  && foo-DDD");
 		assertTaskApps("foo", "AAA && FFF 'FAILED' -> EEE &&  && DDD", "foo-AAA", "foo-FFF", "foo-EEE",
 				"foo-BBB", "foo-CCC", "foo-DDD");
-		assertEquals(" && ", parse(" && ", true).toExecutableDSL());
-		assertEquals(" && ",
-				parse(" && ", true).toExecutableDSL());
-		assertEquals(" && test-D", parse(" && D", true).toExecutableDSL());
-		assertEquals(">", parse(">", true).toExecutableDSL());
-		assertEquals(">", parse(">", true).toExecutableDSL());
+		assertThat(parse(" && ", true).toExecutableDSL()).isEqualTo(" && ");
+		assertThat(parse(" && ", true).toExecutableDSL()).isEqualTo(" && ");
+		assertThat(parse(" && D", true).toExecutableDSL()).isEqualTo(" && test-D");
+		assertThat(parse(">", true).toExecutableDSL()).isEqualTo(">");
+		assertThat(parse(">", true).toExecutableDSL()).isEqualTo(">");
 
 		ctn = parse("AAA 0->BBB");
-		List<TransitionNode> transitions = ((TaskAppNode) ((FlowNode) ctn.getSequences().get(0)).getSeriesElement(0))
+		List<TransitionNode> transitions = ((TaskAppNode) ctn.getSequences().get(0).getSeriesElement(0))
 				.getTransitions();
-		assertEquals("0", transitions.get(0).getStatusToCheckInDSLForm());
+		assertThat(transitions.get(0).getStatusToCheckInDSLForm()).isEqualTo("0");
 
 		ctn = parse("AAA '0'->BBB");
-		transitions = ((TaskAppNode) ((FlowNode) ctn.getSequences().get(0)).getSeriesElement(0)).getTransitions();
-		assertEquals("'0'", transitions.get(0).getStatusToCheckInDSLForm());
+		transitions = ((TaskAppNode) ctn.getSequences().get(0).getSeriesElement(0)).getTransitions();
+		assertThat(transitions.get(0).getStatusToCheckInDSLForm()).isEqualTo("'0'");
 
 		ctn = parse("AAA *->BBB '*'->CCC");
-		transitions = ((TaskAppNode) ((FlowNode) ctn.getSequences().get(0)).getSeriesElement(0)).getTransitions();
-		assertEquals("*", transitions.get(0).getStatusToCheckInDSLForm());
-		assertEquals("'*'", transitions.get(1).getStatusToCheckInDSLForm());
+		transitions = ((TaskAppNode) ctn.getSequences().get(0).getSeriesElement(0)).getTransitions();
+		assertThat(transitions.get(0).getStatusToCheckInDSLForm()).isEqualTo("*");
+		assertThat(transitions.get(1).getStatusToCheckInDSLForm()).isEqualTo("'*'");
 
-		assertEquals("test-AAA 'failed'->test-BBB *->test-CCC",
-				parse("AAA 'failed' -> BBB * -> CCC").toExecutableDSL());
-		assertEquals("test-AAA 'failed'->test-BBB '*'->test-CCC",
-				parse("AAA 'failed' -> BBB '*' -> CCC").toExecutableDSL());
-		assertEquals("test-AAA 1->test-BBB 2->test-CCC", parse("AAA 1 -> BBB 2 -> CCC").toExecutableDSL());
+		assertThat(parse("AAA 'failed' -> BBB * -> CCC").toExecutableDSL()).isEqualTo("test-AAA 'failed'->test-BBB *->test-CCC");
+		assertThat(parse("AAA 'failed' -> BBB '*' -> CCC").toExecutableDSL()).isEqualTo("test-AAA 'failed'->test-BBB '*'->test-CCC");
+		assertThat(parse("AAA 1 -> BBB 2 -> CCC").toExecutableDSL()).isEqualTo("test-AAA 1->test-BBB 2->test-CCC");
 	}
 
 	@Test
 	public void isComposedTask() {
 		ctn = parse("appA 'foo' -> appB");
-		assertTrue(ctn.isComposed());
-		assertNull(ctn.getTaskApp());
+		assertThat(ctn.isComposed()).isTrue();
+		assertThat(ctn.getTaskApp()).isNull();
 		assertGraph("[0:START][1:appA][2:appB][3:END][0-1][foo:1-2][1-3][2-3]", "appA 'foo' -> appB");
 		ctn = parse("appA");
-		assertFalse(ctn.isComposed());
-		assertNotNull(ctn.getTaskApp());
+		assertThat(ctn.isComposed()).isFalse();
+		assertThat(ctn.getTaskApp()).isNotNull();
 	}
 
 	@Test
@@ -432,35 +420,35 @@ public void tokenStreams() {
 	@Test
 	public void singleApp() {
 		ctn = parse("FooApp");
-		assertEquals("FooApp", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(6, ctn.getEndPos());
-		assertEquals("FooApp", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("FooApp");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(6);
+		assertThat(ctn.stringify()).isEqualTo("FooApp");
 		LabelledTaskNode node = ctn.getStart();
-		assertFalse(node.isSplit());
-		assertTrue(node.isFlow());
+		assertThat(node.isSplit()).isFalse();
+		assertThat(node.isFlow()).isTrue();
 		assertFlow(node, "FooApp");
-		assertTrue(((FlowNode) node).getSeriesElement(0).isTaskApp());
+		assertThat(node.getSeriesElement(0).isTaskApp()).isTrue();
 	}
 
 	@Test
 	public void twoAppFlow() {
 		ctn = parse("FooApp  &&  BarApp");
 
-		assertEquals("FooApp  &&  BarApp", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(18, ctn.getEndPos());
-		assertEquals("FooApp && BarApp", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("FooApp  &&  BarApp");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(18);
+		assertThat(ctn.stringify()).isEqualTo("FooApp && BarApp");
 
 		LabelledTaskNode node = ctn.getStart();
-		assertFalse(node.isSplit());
-		assertTrue(node.isFlow());
-		assertFalse(node.isTaskApp());
+		assertThat(node.isSplit()).isFalse();
+		assertThat(node.isFlow()).isTrue();
+		assertThat(node.isTaskApp()).isFalse();
 
 		FlowNode flow = (FlowNode) node;
 		List<LabelledTaskNode> series = flow.getSeries();
-		assertEquals(2, series.size());
-		assertEquals(2, flow.getSeriesLength());
+		assertThat(series.size()).isEqualTo(2);
+		assertThat(flow.getSeriesLength()).isEqualTo(2);
 		assertTaskApp(series.get(0), "FooApp");
 		assertTaskApp(flow.getSeriesElement(0), "FooApp");
 		assertTaskApp(series.get(1), "BarApp");
@@ -491,21 +479,21 @@ public void appsInTaskDef() {
 	public void oneAppSplit() {
 		ctn = parse("< FooApp>");
 
-		assertEquals("< FooApp>", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(9, ctn.getEndPos());
-		assertEquals("", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("< FooApp>");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(9);
+		assertThat(ctn.stringify()).isEqualTo("");
 
 		LabelledTaskNode node = ctn.getStart();
-		assertTrue(node.isFlow());
-		node = ((FlowNode) node).getSeriesElement(0);
-		assertTrue(node.isSplit());
-		assertFalse(node.isTaskApp());
+		assertThat(node.isFlow()).isTrue();
+		node = node.getSeriesElement(0);
+		assertThat(node.isSplit()).isTrue();
+		assertThat(node.isTaskApp()).isFalse();
 
 		SplitNode split = (SplitNode) node;
 		List<LabelledTaskNode> series = split.getSeries();
-		assertEquals(1, series.size());
-		assertEquals(1, split.getSeriesLength());
+		assertThat(series.size()).isEqualTo(1);
+		assertThat(split.getSeriesLength()).isEqualTo(1);
 		assertFlow(series.get(0), "FooApp");
 		assertFlow(split.getSeriesElement(0), "FooApp");
 	}
@@ -514,21 +502,21 @@ public void oneAppSplit() {
 	public void twoAppSplit() {
 		ctn = parse("< FooApp  ||    BarApp>");
 
-		assertEquals("< FooApp  ||    BarApp>", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(23, ctn.getEndPos());
-		assertEquals("", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("< FooApp  ||    BarApp>");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(23);
+		assertThat(ctn.stringify()).isEqualTo("");
 
 		LabelledTaskNode node = ctn.getStart();
-		assertTrue(node.isFlow());
-		node = ((FlowNode) node).getSeriesElement(0);
-		assertTrue(node.isSplit());
-		assertFalse(node.isTaskApp());
+		assertThat(node.isFlow()).isTrue();
+		node = node.getSeriesElement(0);
+		assertThat(node.isSplit()).isTrue();
+		assertThat(node.isTaskApp()).isFalse();
 
 		SplitNode split = (SplitNode) node;
 		List<LabelledTaskNode> series = split.getSeries();
-		assertEquals(2, series.size());
-		assertEquals(2, split.getSeriesLength());
+		assertThat(series.size()).isEqualTo(2);
+		assertThat(split.getSeriesLength()).isEqualTo(2);
 		assertFlow(series.get(0), "FooApp");
 		assertFlow(split.getSeriesElement(0), "FooApp");
 		assertFlow(series.get(1), "BarApp");
@@ -538,93 +526,93 @@ public void twoAppSplit() {
 	@Test
 	public void appWithOneTransition() {
 		ctn = parse("App1 0->App2");
-		assertEquals("test", ctn.getName());
-		assertEquals("App1 0->App2", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(12, ctn.getEndPos());
-		assertEquals("App1 0->App2", ctn.stringify());
+		assertThat(ctn.getName()).isEqualTo("test");
+		assertThat(ctn.getTaskText()).isEqualTo("App1 0->App2");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(12);
+		assertThat(ctn.stringify()).isEqualTo("App1 0->App2");
 		LabelledTaskNode firstNode = ctn.getStart();
-		assertTrue(firstNode.isFlow());
-		List<TransitionNode> transitions = ((TaskAppNode) ((FlowNode) firstNode).getSeriesElement(0)).getTransitions();
-		assertEquals(1, transitions.size());
+		assertThat(firstNode.isFlow()).isTrue();
+		List<TransitionNode> transitions = ((TaskAppNode) firstNode.getSeriesElement(0)).getTransitions();
+		assertThat(transitions.size()).isEqualTo(1);
 		TransitionNode transition = transitions.get(0);
-		assertEquals("0", transition.getStatusToCheck());
-		assertEquals("App2", transition.getTargetDslText());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(12, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("0");
+		assertThat(transition.getTargetDslText()).isEqualTo("App2");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(12);
 	}
 
 	@Test
 	public void appWithTwoTransitions() {
 		ctn = parse("App1 0->App2 'abc' ->   App3");
-		assertEquals("App1 0->App2 'abc' ->   App3", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(28, ctn.getEndPos());
-		assertEquals("App1 0->App2 'abc'->App3", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("App1 0->App2 'abc' ->   App3");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(28);
+		assertThat(ctn.stringify()).isEqualTo("App1 0->App2 'abc'->App3");
 		LabelledTaskNode node = ctn.getStart();
-		assertTrue(node.isFlow());
-		node = ((FlowNode) node).getSeriesElement(0);
+		assertThat(node.isFlow()).isTrue();
+		node = node.getSeriesElement(0);
 		List<TransitionNode> transitions = ((TaskAppNode) node).getTransitions();
-		assertEquals(2, transitions.size());
+		assertThat(transitions.size()).isEqualTo(2);
 		TransitionNode transition = transitions.get(0);
-		assertEquals("0", transition.getStatusToCheck());
-		assertTrue(transition.isExitCodeCheck());
-		assertEquals("App2", transition.getTargetDslText());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(12, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("0");
+		assertThat(transition.isExitCodeCheck()).isTrue();
+		assertThat(transition.getTargetDslText()).isEqualTo("App2");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(12);
 		transition = transitions.get(1);
-		assertEquals("abc", transition.getStatusToCheck());
-		assertFalse(transition.isExitCodeCheck());
-		assertEquals("App3", transition.getTargetDslText());
-		assertEquals(13, transition.getStartPos());
-		assertEquals(28, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("abc");
+		assertThat(transition.isExitCodeCheck()).isFalse();
+		assertThat(transition.getTargetDslText()).isEqualTo("App3");
+		assertThat(transition.getStartPos()).isEqualTo(13);
+		assertThat(transition.getEndPos()).isEqualTo(28);
 	}
 
 	@Test
 	public void appWithWildcardTransitions() {
 		ctn = parse("App1 *->App2 '*'->App3");
-		assertEquals("App1 *->App2 '*'->App3", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(22, ctn.getEndPos());
-		assertEquals("App1 *->App2 '*'->App3", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("App1 *->App2 '*'->App3");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(22);
+		assertThat(ctn.stringify()).isEqualTo("App1 *->App2 '*'->App3");
 		LabelledTaskNode node = ctn.getStart();
-		node = ((FlowNode) node).getSeriesElement(0);
-		assertTrue(node.isTaskApp());
+		node = node.getSeriesElement(0);
+		assertThat(node.isTaskApp()).isTrue();
 		List<TransitionNode> transitions = ((TaskAppNode) node).getTransitions();
-		assertEquals(2, transitions.size());
+		assertThat(transitions.size()).isEqualTo(2);
 
 		TransitionNode transition = transitions.get(0);
-		assertEquals("*", transition.getStatusToCheck());
-		assertTrue(transition.isExitCodeCheck());
-		assertEquals("App2", transition.getTargetDslText());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(12, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("*");
+		assertThat(transition.isExitCodeCheck()).isTrue();
+		assertThat(transition.getTargetDslText()).isEqualTo("App2");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(12);
 		transition = transitions.get(1);
-		assertEquals("*", transition.getStatusToCheck());
-		assertFalse(transition.isExitCodeCheck());
-		assertEquals("App3", transition.getTargetDslText());
-		assertEquals(13, transition.getStartPos());
-		assertEquals(22, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("*");
+		assertThat(transition.isExitCodeCheck()).isFalse();
+		assertThat(transition.getTargetDslText()).isEqualTo("App3");
+		assertThat(transition.getStartPos()).isEqualTo(13);
+		assertThat(transition.getEndPos()).isEqualTo(22);
 	}
 
 	@Test
 	public void appWithLabelReferenceTransition() {
 		ctn = parse("App1 'foo'->:something", false);
-		assertEquals("App1 'foo'->:something", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(22, ctn.getEndPos());
-		assertEquals("App1 'foo'->:something", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("App1 'foo'->:something");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(22);
+		assertThat(ctn.stringify()).isEqualTo("App1 'foo'->:something");
 		LabelledTaskNode firstNode = ctn.getStart();
 		assertFlow(firstNode, "App1");
-		List<TransitionNode> transitions = ((TaskAppNode) ((FlowNode) firstNode).getSeriesElement(0)).getTransitions();
-		assertEquals(1, transitions.size());
+		List<TransitionNode> transitions = ((TaskAppNode) firstNode.getSeriesElement(0)).getTransitions();
+		assertThat(transitions.size()).isEqualTo(1);
 		TransitionNode transition = transitions.get(0);
-		assertEquals("foo", transition.getStatusToCheck());
-		assertFalse(transition.isExitCodeCheck());
-		assertEquals(":something", transition.getTargetDslText());
-		assertEquals("something", transition.getTargetLabel());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(22, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("foo");
+		assertThat(transition.isExitCodeCheck()).isFalse();
+		assertThat(transition.getTargetDslText()).isEqualTo(":something");
+		assertThat(transition.getTargetLabel()).isEqualTo("something");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(22);
 	}
 
 	@Test
@@ -640,29 +628,29 @@ public void splitMainComposedTaskOverMultipleLines() {
 		ctn = parse("FooApp\n 0\n->:a\n 1->:b\n &&\nBarApp 2->:c 3->:d", false);
 		assertFlow(ctn.getStart(), "FooApp", "BarApp");
 		ctn = parse("");
-		assertSplit(((FlowNode) ctn.getStart()).getSeriesElement(0), "FooApp", "BarApp");
+		assertSplit(ctn.getStart().getSeriesElement(0), "FooApp", "BarApp");
 		ctn = parse("<\nFooApp ||\nBarApp\n>");
-		assertSplit(((FlowNode) ctn.getStart()).getSeriesElement(0), "FooApp", "BarApp");
+		assertSplit(ctn.getStart().getSeriesElement(0), "FooApp", "BarApp");
 	}
 
 	@Test
 	public void labelledElement() {
 		ctn = parse("foo: appA");
 		LabelledTaskNode start = ctn.getStart();
-		assertEquals("foo", start.getLabelString());
+		assertThat(start.getLabelString()).isEqualTo("foo");
 		FlowNode f = (FlowNode) start;
-		assertEquals("foo", f.getLabelString());
-		assertEquals("appA", ((TaskAppNode) f.getSeriesElement(0)).getName());
+		assertThat(f.getLabelString()).isEqualTo("foo");
+		assertThat(((TaskAppNode) f.getSeriesElement(0)).getName()).isEqualTo("appA");
 
 		ctn = parse("foo: ");
 		start = ctn.getStart();
-		assertEquals("foo", start.getLabelString());
-		SplitNode s = (SplitNode) ((FlowNode) start).getSeriesElement(0);
+		assertThat(start.getLabelString()).isEqualTo("foo");
+		SplitNode s = (SplitNode) start.getSeriesElement(0);
 		assertSplit(s, "appA", "appB");
 
 		ctn = parse("foo: appA && appB");
 		start = ctn.getStart();
-		assertEquals("foo", start.getLabelString());
+		assertThat(start.getLabelString()).isEqualTo("foo");
 		assertFlow(start, "appA", "appB");
 	}
 
@@ -704,7 +692,7 @@ public void twoReferencesToSecondarySequence() {
 				+ "[0-1][1-2][2-3][3-4][fail:1-9][fail2:2-9][9-10][10-4]", spec);
 	}
 
-	@Ignore
+	@Disabled
 	@Test
 	public void transitionToSplit() {
 		String spec = "aa 'foo'->:split && bb && split:  && ee";
@@ -718,18 +706,16 @@ public void transitionToNonResolvedLabel() {
 		String spec = "aa 'foo'->:split && bb && cc";
 		TaskNode ctn = parse(spec, false);
 		List<TaskValidationProblem> validationProblems = ctn.validate();
-		assertEquals(1, validationProblems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED,
-				validationProblems.get(0).getMessage());
-		assertEquals(3, validationProblems.get(0).getOffset());
+		assertThat(validationProblems.size()).isEqualTo(1);
+		assertThat(validationProblems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED);
+		assertThat(validationProblems.get(0).getOffset()).isEqualTo(3);
 
 		spec = ":split && bb && cc || dd>";
 		ctn = parse(spec, false);
 		validationProblems = ctn.validate();
-		assertEquals(1, validationProblems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED,
-				validationProblems.get(0).getMessage());
-		assertEquals(4, validationProblems.get(0).getOffset());
+		assertThat(validationProblems.size()).isEqualTo(1);
+		assertThat(validationProblems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED);
+		assertThat(validationProblems.get(0).getOffset()).isEqualTo(4);
 	}
 
 	@Test
@@ -737,52 +723,50 @@ public void visitors() {
 		ctn = parse("appA");
 		TestVisitor tv = new TestVisitor();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] SN[0] >F =F >TA =TA[appA] SN[foo: 0] >F =F[foo:] >TA =TA[foo: appA] SN[foo: 0] >F =F[foo:] >TA =TA[foo: appA] SN[0] >F =F >TA =TA[appA] TA =TA[appB] SN[0] >F =F >TA =TA[appA] TA =TA[appB] ");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >S =S >F =F >TA =TA[appA] F =F >TA =TA[appB] SN[0] >F =F >S =S >F =F >TA =TA[appA] F =F >TA =TA[appB] ");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >S =S >F =F >TA =TA[appA] TA =TA[appB] F =F >TA =TA[appC] SN[0] >F =F >S =S >F =F >TA =TA[appA] TA =TA[appB] F =F >TA =TA[appC] :foo", false);
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] appB");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] >T =T[0->appB] SN[0] >F =F >TA =TA[appA] >T =T[0->appB] SN[0] >F =F >TA =TA[appA] SN[1] >F =F >TA =TA[appB] SN[0] >F =F >TA =TA[appA] SN[1] >F =F >TA =TA[appB] :foo *->appC;foo: appD && appE", false);
 		assertApps(ctn.getTaskApps(), "appA", "appB", "appC", "foo:appD", "appE");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] TA =TA[appB] >T =T[0->:foo] T =T[*->appC] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appD] TA =TA[appE] SN[0] >F =F >TA =TA[appA] TA =TA[appB] >T =T[0->:foo] T =T[*->appC] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appD] TA =TA[appE] :foo\n  *->appB\n  && appE;foo: appC && appD");
 		LabelledTaskNode start = ctn.getStart(); // get the root of the AST starting appA
-		assertNotNull(start);
+		assertThat(start).isNotNull();
 		List<LabelledTaskNode> sequences = ctn.getSequences();
 		LabelledTaskNode labelledTaskNode = sequences.get(1);
-		assertEquals("foo", labelledTaskNode.getLabelString());
+		assertThat(labelledTaskNode.getLabelString()).isEqualTo("foo");
 		LabelledTaskNode fooSequence = ctn.getSequenceWithLabel("foo"); // get the AST for foo: ...
-		assertNotNull(fooSequence);
+		assertThat(fooSequence).isNotNull();
 		TestVisitor tv = new TestVisitor();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] T =T[*->appB] TA =TA[appE] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appC] TA =TA[appD] SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] T =T[*->appB] TA =TA[appE] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appC] TA =TA[appD]  problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED, problems.get(0).getMessage());
-		assertEquals(5, problems.get(0).getOffset());
-		assertEquals("158E:(pos 5): secondary sequences must have labels or are unreachable",
-				problems.get(0).toString());
-		assertEquals("158E:(pos 5): secondary sequences must have labels or are unreachable\nappA;appB\n     ^\n",
-				problems.get(0).toStringWithContext());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED);
+		assertThat(problems.get(0).getOffset()).isEqualTo(5);
+		assertThat(problems.get(0).toString()).isEqualTo("158E:(pos 5): secondary sequences must have labels or are unreachable");
+		assertThat(problems.get(0).toStringWithContext()).isEqualTo("158E:(pos 5): secondary sequences must have labels or are unreachable\nappA;appB\n     ^\n");
 
 		validator.reset();
 		ctn = parse("appA;foo: appB");
 		ctn.accept(validator);
-		assertFalse(validator.hasProblems());
+		assertThat(validator.hasProblems()).isFalse();
 
 		validator.reset();
 		ctn = parse("appA;foo: appB\nappC", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED, problems.get(0).getMessage());
-		assertEquals(15, problems.get(0).getOffset());
-		assertEquals("158E:(pos 15): secondary sequences must have labels or are unreachable",
-				problems.get(0).toString());
-		assertEquals("158E:(pos 15): secondary sequences must have labels or are unreachable\nappC\n^\n",
-				problems.get(0).toStringWithContext());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED);
+		assertThat(problems.get(0).getOffset()).isEqualTo(15);
+		assertThat(problems.get(0).toString()).isEqualTo("158E:(pos 15): secondary sequences must have labels or are unreachable");
+		assertThat(problems.get(0).toStringWithContext()).isEqualTo("158E:(pos 15): secondary sequences must have labels or are unreachable\nappC\n^\n");
 
 		validator.reset();
 		ctn = parse("appA && appA", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, problems.get(0).getMessage());
-		assertEquals(8, problems.get(0).getOffset());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
+		assertThat(problems.get(0).getOffset()).isEqualTo(8);
 		validator.reset();
 		ctn = parse("appA 'foo' -> appA", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, problems.get(0).getMessage());
-		assertEquals(14, problems.get(0).getOffset());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
+		assertThat(problems.get(0).getOffset()).isEqualTo(14);
 		validator.reset();
 		ctn = parse("appA 'foo' -> appA: appB", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_LABEL_CLASHES_WITH_TASKAPP_NAME, problems.get(0).getMessage());
-		assertEquals(14, problems.get(0).getOffset());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_LABEL_CLASHES_WITH_TASKAPP_NAME);
+		assertThat(problems.get(0).getOffset()).isEqualTo(14);
 		validator.reset();
 		ctn = parse("label1: appA 'foo' -> label1: appB", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_DUPLICATE_LABEL, problems.get(0).getMessage());
-		assertEquals(22, problems.get(0).getOffset());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_DUPLICATE_LABEL);
+		assertThat(problems.get(0).getOffset()).isEqualTo(22);
 		validator.reset();
 		ctn = parse("label1: appA 'foo' -> label1", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_CLASHES_WITH_LABEL, problems.get(0).getMessage());
-		assertEquals(22, problems.get(0).getOffset());
+		assertThat(problems.size()).isEqualTo(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_CLASHES_WITH_LABEL);
+		assertThat(problems.get(0).getOffset()).isEqualTo(22);
 	}
 
 	@Test
@@ -884,30 +864,30 @@ public void labels() {
 		// basic task
 		ctn = parse("aaa: appA");
 		LabelledTaskNode flow = ctn.getStart();
-		assertEquals("aaa", flow.getLabelString());
-		TaskAppNode taskApp = (TaskAppNode) ((FlowNode) flow).getSeriesElement(0);
-		assertEquals("aaa", taskApp.getLabelString());
+		assertThat(flow.getLabelString()).isEqualTo("aaa");
+		TaskAppNode taskApp = (TaskAppNode) flow.getSeriesElement(0);
+		assertThat(taskApp.getLabelString()).isEqualTo("aaa");
 
 		// flows
 		ctn = parse("aaa: appA && bbb: appB");
-		taskApp = (TaskAppNode) ((FlowNode) ctn.getStart()).getSeriesElement(1);
-		assertEquals("bbb", taskApp.getLabelString());
+		taskApp = (TaskAppNode) ctn.getStart().getSeriesElement(1);
+		assertThat(taskApp.getLabelString()).isEqualTo("bbb");
 
 		// splits
 		ctn = parse("outer:");
-		flow = (FlowNode) ctn.getStart();
-		assertEquals("outer", flow.getLabelString());
+		flow = ctn.getStart();
+		assertThat(flow.getLabelString()).isEqualTo("outer");
 		SplitNode s = (SplitNode) flow.getSeriesElement(0);
-		assertEquals("outer", s.getLabelString());
-		taskApp = (TaskAppNode) (((FlowNode) s.getSeriesElement(0)).getSeriesElement(0));
-		assertEquals("aaa", taskApp.getLabelString());
-		taskApp = (TaskAppNode) (((FlowNode) s.getSeriesElement(1)).getSeriesElement(0));
-		assertEquals("bbb", taskApp.getLabelString());
+		assertThat(s.getLabelString()).isEqualTo("outer");
+		taskApp = (TaskAppNode) (s.getSeriesElement(0).getSeriesElement(0));
+		assertThat(taskApp.getLabelString()).isEqualTo("aaa");
+		taskApp = (TaskAppNode) (s.getSeriesElement(1).getSeriesElement(0));
+		assertThat(taskApp.getLabelString()).isEqualTo("bbb");
 
 		// parentheses
 		ctn = parse("(aaa: appA && appB)");
-		taskApp = (TaskAppNode) ((FlowNode) ctn.getStart()).getSeriesElement(0);
-		assertEquals("aaa", taskApp.getLabelString());
+		taskApp = (TaskAppNode) ctn.getStart().getSeriesElement(0);
+		assertThat(taskApp.getLabelString()).isEqualTo("aaa");
 
 		checkForParseError("aaa: (appA)", DSLMessage.TASK_NO_LABELS_ON_PARENS, 5);
 		checkForParseError("aaa: bbb: appA", DSLMessage.NO_DOUBLE_LABELS, 5);
@@ -937,7 +917,7 @@ public void graphToText_1712() {
 		graph.nodes.get(2).metadata = new HashMap<>();
 		graph.links.get(0).properties = new HashMap<>();
 		graph.links.get(1).properties = new HashMap<>();
-		assertEquals("timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("timestamp");
 	}
 	
 	@Test
@@ -950,17 +930,17 @@ public void graphToText_3667() {
 
 		TaskNode ctn = parse("t1: timestamp 'FAILED'->t2: timestamp && t3: timestamp");
 		Graph graph = ctn.toGraph();
-		assertEquals("t1: timestamp 'FAILED'->t2: timestamp && t3: timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("t1: timestamp 'FAILED'->t2: timestamp && t3: timestamp");
 		
 		ctn = parse("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp && t3: timestamp --format=gghhii");
 		graph = ctn.toGraph();
-		assertEquals("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp && t3: timestamp --format=gghhii", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp && t3: timestamp --format=gghhii");
 
 		ctn = parse("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp --format=ddeeff && t3: timestamp --format=gghhii");
 		graph = ctn.toGraph();
 		Node node = graph.nodes.get(2);
-		assertEquals("ddeeff",node.properties.get("format"));
-		assertEquals("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp --format=ddeeff && t3: timestamp --format=gghhii", graph.toDSLText());
+		assertThat(node.properties.get("format")).isEqualTo("ddeeff");
+		assertThat(graph.toDSLText()).isEqualTo("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp --format=ddeeff && t3: timestamp --format=gghhii");
 		
 		assertGraph("[0:START][1:eee:timestamp:format=ttt][2:QQQQQ:timestamp:format=NOT-IN-TEXT][3:ooo:timestamp:format=yyyy][4:END][0-1][FAILED:1-2][1-3][3-4][2-4]",
 				    "eee: timestamp --format=ttt 'FAILED'->QQQQQ: timestamp --format=NOT-IN-TEXT && ooo: timestamp --format=yyyy");
@@ -974,22 +954,22 @@ public void graphToTextSingleAppInSplit() {
 		assertGraph("[0:START][1:AppA][2:END][0-1][1-2]","");
 		TaskNode ctn = parse("");
 		Graph graph = ctn.toGraph();
-		assertEquals("AppA", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppA");
 		
 		assertGraph("[0:START][1:AppA][2:AppB][3:END][0-1][1-2][2-3]"," && AppB");
 		ctn = parse(" && AppB");
 		graph = ctn.toGraph();
-		assertEquals("AppA && AppB", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppA && AppB");
 		
 		assertGraph("[0:START][1:AppA][2:AppC][3:AppB][4:END][0-1][99:1-2][1-3][2-3][3-4]"," AppC> && AppB");
 		ctn = parse("AppC> && AppB");
 		graph = ctn.toGraph();
-		assertEquals("AppC> && AppB", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppC> && AppB");
 
 		// Check it still does the right thing when the split does have multiple:
 		ctn = parse("AppC || AppD> && AppB");
 		graph = ctn.toGraph();
-		assertEquals("AppC || AppD> && AppB", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppC || AppD> && AppB");
 		
 		// This is the test specifically for issue 3263
 		ctn = parse("T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp");
@@ -998,7 +978,7 @@ public void graphToTextSingleAppInSplit() {
 		assertGraph("[0:START][1:Import:timestamp][2:T2:timestamp][3:T1:timestamp][4:Backwards:timestamp][5:END][0-1][Error2:1-2][Error:1-3][1-4][2-4][3-4][4-5]",
 			"T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp");
 		graph = ctn.toGraph();
-		assertEquals("T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp");
 		
 		// This is the variant of the above without the <...>
 		// Now notice the links from the transition nodes go direct to END
@@ -1006,7 +986,7 @@ public void graphToTextSingleAppInSplit() {
 		assertGraph("[0:START][1:Import:timestamp][2:T2:timestamp][3:T1:timestamp][4:Backwards:timestamp][5:END][0-1][Error2:1-2][Error:1-3][1-4][4-5][2-5][3-5]",
 			"Import: timestamp 'Error2'->T2: timestamp 'Error'->T1: timestamp && Backwards: timestamp");
 		graph = ctn.toGraph();
-		assertEquals("Import: timestamp 'Error2'->T2: timestamp 'Error'->T1: timestamp && Backwards: timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("Import: timestamp 'Error2'->T2: timestamp 'Error'->T1: timestamp && Backwards: timestamp");
 	}
 
 	@Test
@@ -1088,16 +1068,16 @@ public void errorExpectDoubleOr() {
 	public void modeError() {
 		try {
 			new TaskParser("foo", "appA --p1=v1", false, true).parse();
-			fail();
+			fail("");
 		}
 		catch (CheckPointedParseException cppe) {
-			assertEquals(DSLMessage.TASK_ARGUMENTS_NOT_ALLOWED_UNLESS_IN_APP_MODE, cppe.message);
+			assertThat(cppe.message).isEqualTo(DSLMessage.TASK_ARGUMENTS_NOT_ALLOWED_UNLESS_IN_APP_MODE);
 		}
 		try {
 			new TaskParser("foo", "appA --p1=v1", true, true).parse();
 		}
 		catch (CheckPointedParseException cppe) {
-			fail();
+			fail("");
 		}
 	}
 
@@ -1110,7 +1090,7 @@ public void unexpectedDoubleAnd() {
 	public void toDSLTextTransitions() {
 		// [SHOULD-VALIDATE] There is no real route to bbb
 		String spec = "aaa '*'->$END && bbb";
-		assertEquals(spec, parse(spec).toDSL());
+		assertThat(parse(spec).toDSL()).isEqualTo(spec);
 		assertGraph("[0:START][1:aaa][2:$END][3:bbb][4:END]" + "[0-1][*:1-2][1-3][3-4]", spec);
 		checkDSLToGraphAndBackToDSL(spec);
 	}
@@ -1169,9 +1149,9 @@ public void toDSLTextManualSync() {
 
 	@Test
 	public void whitespace() {
-		assertEquals("A && B", parse("A&&B").stringify());
-		assertEquals("", parse("").stringify());
-		assertEquals("", parse("").stringify());
+		assertThat(parse("A&&B").stringify()).isEqualTo("A && B");
+		assertThat(parse("").stringify()).isEqualTo("");
+		assertThat(parse("").stringify()).isEqualTo("");
 	}
 
 	@Test
@@ -1192,7 +1172,7 @@ public void missingQuotes() {
 	@Test
 	public void parentheses2() {
 		TaskNode ctn = parse("<(jobA && jobB && jobC) || boo: jobC>");
-		assertEquals("", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo("");
 	}
 
 	@Test
@@ -1208,53 +1188,52 @@ public void funnyJobNames() {
 	public void names() {
 		ctn = parse("aaaa: foo");
 		List<LabelledTaskNode> sequences = ctn.getSequences();
-		assertEquals("aaaa", sequences.get(0).getLabelString());
+		assertThat(sequences.get(0).getLabelString()).isEqualTo("aaaa");
 		ctn = parse("aaaa: foo && bar");
 		sequences = ctn.getSequences();
-		assertEquals("aaaa", sequences.get(0).getLabelString());
+		assertThat(sequences.get(0).getLabelString()).isEqualTo("aaaa");
 	}
 
 	@Test
 	public void nestedSplit1() {
 		TaskNode ctn = parse("< || jobC>");
-		assertEquals("< || jobC>", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo("< || jobC>");
 		LabelledTaskNode start = ctn.getStart();
-		assertTrue(start instanceof FlowNode);
-		SplitNode split = (SplitNode) ((FlowNode) start).getSeriesElement(0);
-		LabelledTaskNode seriesElement = ((FlowNode) split.getSeriesElement(0)).getSeriesElement(0);
-		assertTrue(seriesElement instanceof SplitNode);
+		assertInstanceOf(FlowNode.class, start);
+		SplitNode split = (SplitNode) start.getSeriesElement(0);
+		LabelledTaskNode seriesElement = split.getSeriesElement(0).getSeriesElement(0);
+		assertInstanceOf(SplitNode.class, seriesElement);
 		SplitNode split2 = (SplitNode) seriesElement;
-		assertEquals(2, split2.getSeriesLength());
+		assertThat(split2.getSeriesLength()).isEqualTo(2);
 	}
 
 	@Test
 	public void nestedSplit2() {
 		TaskNode ctn = parse(" || jobD>");
-		assertEquals(" || jobD>", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo(" || jobD>");
 		LabelledTaskNode start = ctn.getStart();
-		assertTrue(start.isFlow());
-		SplitNode split = (SplitNode) ((FlowNode) start).getSeriesElement(0);
-		assertEquals(3, split.getSeriesLength());
+		assertThat(start.isFlow()).isTrue();
+		SplitNode split = (SplitNode) start.getSeriesElement(0);
+		assertThat(split.getSeriesLength()).isEqualTo(3);
 		LabelledTaskNode seriesElement = split.getSeriesElement(1);
-		SplitNode splitSeriesElement = (SplitNode) ((FlowNode) seriesElement).getSeriesElement(0);
-		assertTrue(splitSeriesElement.isSplit());
-		assertEquals(2, splitSeriesElement.getSeriesLength());
-		assertEquals("", splitSeriesElement.stringify());
-		assertEquals("jobB",
-				((TaskAppNode) ((FlowNode) splitSeriesElement.getSeriesElement(0)).getSeriesElement(0)).getName());
+		SplitNode splitSeriesElement = (SplitNode) seriesElement.getSeriesElement(0);
+		assertThat(splitSeriesElement.isSplit()).isTrue();
+		assertThat(splitSeriesElement.getSeriesLength()).isEqualTo(2);
+		assertThat(splitSeriesElement.stringify()).isEqualTo("");
+		assertThat(((TaskAppNode) splitSeriesElement.getSeriesElement(0).getSeriesElement(0)).getName()).isEqualTo("jobB");
 	}
 
 	@Test
 	public void singleTransition() {
 		TaskNode ctn = parse("foo 'completed'->bar");
 		LabelledTaskNode start = ctn.getStart();
-		start = ((FlowNode) start).getSeriesElement(0);
-		assertTrue(start instanceof TaskAppNode);
+		start = start.getSeriesElement(0);
+		assertInstanceOf(TaskAppNode.class, start);
 		TaskAppNode ta = (TaskAppNode) start;
 		List transitions = ta.getTransitions();
-		assertEquals(1, transitions.size());
-		assertEquals("completed", transitions.get(0).getStatusToCheck());
-		assertEquals("bar", transitions.get(0).getTargetApp().getName());
+		assertThat(transitions.size()).isEqualTo(1);
+		assertThat(transitions.get(0).getStatusToCheck()).isEqualTo("completed");
+		assertThat(transitions.get(0).getTargetApp().getName()).isEqualTo("bar");
 	}
 
 	@Test
@@ -1262,13 +1241,13 @@ public void doubleTransition() {
 		TaskNode ctn = parse("foo 'completed'->bar 'wibble'->wobble");
 		LabelledTaskNode start = ctn.getStart();
 		assertFlow(start, "foo");
-		TaskAppNode ta = (TaskAppNode) ((FlowNode) start).getSeriesElement(0);
+		TaskAppNode ta = (TaskAppNode) start.getSeriesElement(0);
 		List transitions = ta.getTransitions();
-		assertEquals(2, transitions.size());
-		assertEquals("completed", transitions.get(0).getStatusToCheck());
-		assertEquals("bar", transitions.get(0).getTargetApp().getName());
-		assertEquals("wibble", transitions.get(1).getStatusToCheck());
-		assertEquals("wobble", transitions.get(1).getTargetApp().getName());
+		assertThat(transitions.size()).isEqualTo(2);
+		assertThat(transitions.get(0).getStatusToCheck()).isEqualTo("completed");
+		assertThat(transitions.get(0).getTargetApp().getName()).isEqualTo("bar");
+		assertThat(transitions.get(1).getStatusToCheck()).isEqualTo("wibble");
+		assertThat(transitions.get(1).getTargetApp().getName()).isEqualTo("wobble");
 	}
 
 	@Test
@@ -1280,85 +1259,73 @@ public void moreSophisticatedScenarios_gh712_1a() {
 		// https://user-images.githubusercontent.com/1562654/38313990-27662f60-37da-11e8-9106-26688d631fae.png
 		LabelledTaskNode start = ctn.getStart();
 		FlowNode f1 = (FlowNode) start;
-		assertEquals(1, f1.getSeriesLength());
+		assertThat(f1.getSeriesLength()).isEqualTo(1);
 		SplitNode s1 = (SplitNode) f1.getSeriesElement(0);
-		assertEquals(2, s1.getSeriesLength());
+		assertThat(s1.getSeriesLength()).isEqualTo(2);
 		// This one is just spark-yarn
 		assertFlow(s1.getSeriesElement(1), "spark-yarn");
 
 		// This one is a flow of a split of jdbchdfs-local/spark-client and
 		// spark-cluster/spark-cluster and then timestamp
 		FlowNode f2 = (FlowNode) s1.getSeriesElement(0);
-		assertEquals(2, f2.getSeriesLength());
-		assertEquals("timestamp", ((TaskAppNode) f2.getSeriesElement(1)).getName());
+		assertThat(f2.getSeriesLength()).isEqualTo(2);
+		assertThat(((TaskAppNode) f2.getSeriesElement(1)).getName()).isEqualTo("timestamp");
 
 		SplitNode s2 = (SplitNode) f2.getSeriesElement(0);
-		assertEquals(2, s2.getSeriesLength());
+		assertThat(s2.getSeriesLength()).isEqualTo(2);
 		FlowNode s2fa = (FlowNode) s2.getSeriesElement(0);
 		FlowNode s2fb = (FlowNode) s2.getSeriesElement(1);
 		assertFlow(s2fa, "jdbchdfs-local", "spark-client");
 		assertFlow(s2fb, "spark-cluster", "spark-cluster");
 
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:jdbchdfs-local][2:spark-client][3:spark-cluster][4:two:spark-cluster][5:timestamp][6:spark-yarn][7:END]"+
-				"[0-1][1-2][0-3][3-4][2-5][4-5][0-6][5-7][6-7]",
-				graph.toVerboseString());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:jdbchdfs-local][2:spark-client][3:spark-cluster][4:two:spark-cluster][5:timestamp][6:spark-yarn][7:END]" +
+		"[0-1][1-2][0-3][3-4][2-5][4-5][0-6][5-7][6-7]");
 
-		assertEquals(
-				"< && timestamp || spark-yarn>",
-				graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("< && timestamp || spark-yarn>");
 	}
 
 	@Test
 	public void moreSophisticatedScenarios_gh712_1b() {
 		TaskNode ctn = parse("< && CC || DD>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:END]" +
-				"[0-1][0-2][1-3][2-3][0-4][3-5][4-5]",
-				graph.toVerboseString());
-		assertEquals("< && CC || DD>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:END]" +
+		"[0-1][0-2][1-3][2-3][0-4][3-5][4-5]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC || DD>");
 	}
 
 	@Test
 	public void moreSophisticatedScenarios_gh712_1c() {
 		TaskNode ctn = parse("< && CC && DD || EE>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:END]" +
-				"[0-1][0-2][1-3][2-3][3-4][0-5][4-6][5-6]",
-				graph.toVerboseString());
-		assertEquals("< && CC && DD || EE>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:END]" +
+		"[0-1][0-2][1-3][2-3][3-4][0-5][4-6][5-6]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC && DD || EE>");
 		ctn = parse("< && CC && DD || EE>");
-		assertEquals("< && CC && DD || EE>", ctn.toGraph().toDSLText());
+		assertThat(ctn.toGraph().toDSLText()).isEqualTo("< && CC && DD || EE>");
 	}
 
 	@Test
 	public void moreSophisticatedScenarios_gh712_1d() {
 		TaskNode ctn = parse("< && AG || AB>");
-		assertEquals("< && AG || AB>", ctn.toGraph().toDSLText());
+		assertThat(ctn.toGraph().toDSLText()).isEqualTo("< && AG || AB>");
 		// Now include a transition
 		ctn = parse("< AH && AF> && AG || AB>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AC][2:AD][3:AE][4:AH][5:AF][6:AG][7:AB][8:END]" +
-				"[0-1][1-2][0-3][jumpOut:3-4][3-5][2-6][5-6][4-6][0-7][6-8][7-8]",
-				graph.toVerboseString());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AC][2:AD][3:AE][4:AH][5:AF][6:AG][7:AB][8:END]" +
+		"[0-1][1-2][0-3][jumpOut:3-4][3-5][2-6][5-6][4-6][0-7][6-8][7-8]");
 		// Key thing to observe above is the link from [4-6] which goes from
 		// the transition target AH to the end of the split AG
-		assertEquals("<AH && AF> && AG || AB>", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("<AH && AF> && AG || AB>");
 	}
 
 	@Test
 	public void moreSophisticatedScenarios_gh712_1e() {
 		TaskNode ctn = parse("< && CC && DD ||  && GG || HH>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]" +
-				"[0-1][0-2][1-3][2-3][3-4][0-5][0-6][5-7][6-7][0-8][4-9][7-9][8-9]",
-				graph.toVerboseString());
-		assertEquals("< && CC && DD ||  && GG || HH>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]" +
+		"[0-1][0-2][1-3][2-3][3-4][0-5][0-6][5-7][6-7][0-8][4-9][7-9][8-9]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC && DD ||  && GG || HH>");
 	}
 
 	@Test
@@ -1366,11 +1333,9 @@ public void moreSophisticatedScenarios_gh712_1f() {
 		// Multiple nested splits in parallel
 		TaskNode ctn = parse("< && CC ||  && FF && GG || HH>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]"+
-				"[0-1][0-2][1-3][2-3][0-4][0-5][4-6][5-6][6-7][0-8][3-9][7-9][8-9]",
-				graph.toVerboseString());
-		assertEquals("< && CC ||  && FF && GG || HH>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]" +
+		"[0-1][0-2][1-3][2-3][0-4][0-5][4-6][5-6][6-7][0-8][3-9][7-9][8-9]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC ||  && FF && GG || HH>");
 	}
 
 	// Case2: expecting a validation error on the parse because the second spark-cluster
@@ -1379,14 +1344,14 @@ public void moreSophisticatedScenarios_gh712_1f() {
 	public void moreSophisticatedScenarios_gh712_2() {
 		try {
 			parse("< && timestamp || spark-yarn>");
-			fail();
+			fail("");
 		}
 		catch (TaskValidationException tve) {
 			List validationProblems = tve.getValidationProblems();
-			assertEquals(1, validationProblems.size());
+			assertThat(validationProblems.size()).isEqualTo(1);
 			TaskValidationProblem tvp = validationProblems.get(0);
-			assertEquals(53, tvp.getOffset());
-			assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, tvp.getMessage());
+			assertThat(tvp.getOffset()).isEqualTo(53);
+			assertThat(tvp.getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
 		}
 	}
 
@@ -1395,40 +1360,40 @@ public void moreSophisticatedScenarios_gh712_2() {
 	public void moreSophisticatedScenarios_gh712_3() {
 		try {
 			parse("<1: jdbchdfs-local && spark-client && timestamp || spark-cluster && spark-cluster && timestamp || spark-yarn>");
-			fail();
+			fail("");
 		}
 		catch (TaskValidationException tve) {
 			System.out.println(tve);
 			List validationProblems = tve.getValidationProblems();
-			assertEquals(2, validationProblems.size());
+			assertThat(validationProblems.size()).isEqualTo(2);
 			TaskValidationProblem tvp = validationProblems.get(0);
-			assertEquals(68, tvp.getOffset());
-			assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, tvp.getMessage());
+			assertThat(tvp.getOffset()).isEqualTo(68);
+			assertThat(tvp.getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
 			tvp = validationProblems.get(1);
-			assertEquals(85, tvp.getOffset());
-			assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, tvp.getMessage());
+			assertThat(tvp.getOffset()).isEqualTo(85);
+			assertThat(tvp.getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
 		}
 	}
 
 	@Test
 	public void wildcardTransition() {
 		ctn = parse("foo '*'->wibble");
-		assertEquals("foo '*'->wibble", ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo("foo '*'->wibble");
 		ctn = parse("foo \"*\"->wibble");
-		assertEquals("foo \"*\"->wibble", ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo("foo \"*\"->wibble");
 	}
 
 	@Test
 	public void splitWithTransition() {
 		String spec = "kill || bar>";
 		ctn = parse(spec);
-		assertEquals(spec, ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo(spec);
 	}
 
 	@Test
 	public void multiLine() {
 		TaskNode ctn = parse("kill\n" + " '*'->custard\n" + " || bar>");
-		assertEquals("kill '*'->custard || bar>", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo("kill '*'->custard || bar>");
 	}
 
 	@Test
@@ -1439,7 +1404,7 @@ public void emptyInput() {
 	@Test
 	public void toGraph$END() {
 		TaskNode ctn = parse("foo 'oranges'->$END");
-		assertEquals("foo 'oranges'->$END", ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo("foo 'oranges'->$END");
 		assertGraph("[0:START][1:foo][2:$END][3:END][0-1][oranges:1-2][1-3]", "foo 'oranges'->$END");
 		checkDSLToGraphAndBackToDSL("foo 'oranges'->$END");
 	}
 
@@ -1447,7 +1412,7 @@ public void emptyInput() {
 	@Test
 	public void toGraph$FAIL() {
 		String spec = "foo 'oranges'->$FAIL";
-		assertEquals(spec, parse(spec).toDSL());
+		assertThat(parse(spec).toDSL()).isEqualTo(spec);
 		assertGraph("[0:START][1:foo][2:$FAIL][3:END][0-1][oranges:1-2][1-3]", spec);
 		checkDSLToGraphAndBackToDSL(spec);
 	}
 
@@ -1478,33 +1443,33 @@ public void spacesInProperties() {
 		properties.put("two", "b ar");
 		Node newNode = new Node(n.id, n.name, properties);
 		graph.nodes.set(1, newNode);
-		assertEquals("aaa --one=bar --two='b ar'", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar'");
 		graph.nodes.add(new Node("3", "bbb"));
 		graph.links.add(new Link("1", "3", "tname"));
-		assertEquals("aaa --one=bar --two='b ar' 'tname'->bbb", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar' 'tname'->bbb");
 		graph.nodes.add(new Node("4", "ccc"));
 		graph.links.add(new Link("1", "4", "*"));
-		assertEquals("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc");
 		graph.nodes.add(new Node("5", "ddd"));
 		graph.links.add(new Link("1", "5", "3"));
-		assertEquals("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc 3->ddd", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc 3->ddd");
 		// When going from DSL to graph, unquote property values and exit codes
 		String dsl = "aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc 3->ddd";
 		graph = parse(dsl).toGraph();
 		n = graph.nodes.get(1);
-		assertEquals("b ar", n.properties.get("two"));
+		assertThat(n.properties.get("two")).isEqualTo("b ar");
 		Link l = graph.links.get(1);
-		assertEquals("tname", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("tname");
 		l = graph.links.get(2);
-		assertEquals("*", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("*");
 		l = graph.links.get(3);
-		assertEquals("3", l.getTransitionName());
-		assertEquals(dsl, graph.toDSLText());
+		assertThat(l.getTransitionName()).isEqualTo("3");
+		assertThat(graph.toDSLText()).isEqualTo(dsl);
 	}
 
 	@Test
@@ -1515,12 +1480,12 @@ public void wildcardTransitions() {
 				dsl);
 		Graph graph = parse(dsl).toGraph();
 		Link l = graph.links.get(1);
-		assertEquals("tname", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("tname");
 		l = graph.links.get(2);
-		assertEquals("*", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("*");
 		l = graph.links.get(3);
-		assertEquals("3", l.getTransitionName());
-		assertEquals(dsl, graph.toDSLText());
+		assertThat(l.getTransitionName()).isEqualTo("3");
+		assertThat(graph.toDSLText()).isEqualTo(dsl);
 	}
 
 	@Test
@@ -1564,43 +1529,43 @@ private TaskNode parse(String composedTaskName, String dsltext, boolean validate
 	}
 
 	private void assertToken(TokenKind kind, String string, int start, int end, Token t) {
-		assertEquals(kind, t.kind);
-		assertEquals(string, t.getKind().hasPayload() ? t.stringValue() : new String(t.getKind().getTokenChars()));
-		assertEquals(start, t.startPos);
-		assertEquals(end, t.endPos);
+		assertThat(t.kind).isEqualTo(kind);
+		assertThat(t.getKind().hasPayload() ? t.stringValue() : new String(t.getKind().getTokenChars())).isEqualTo(string);
+		assertThat(t.startPos).isEqualTo(start);
+		assertThat(t.endPos).isEqualTo(end);
 	}
 
 	private void assertTokens(Tokens tokens, TokenKind... expectedKinds) {
 		for (int i = 0; i < expectedKinds.length; i++) {
-			assertEquals(expectedKinds[i], tokens.next().getKind());
+			assertThat(tokens.next().getKind()).isEqualTo(expectedKinds[i]);
 		}
 	}
 
 	private void assertTaskApp(LabelledTaskNode node, String taskAppName) {
-		assertTrue(node.isTaskApp());
-		assertEquals(((TaskAppNode) node).getName(), taskAppName);
+		assertThat(node.isTaskApp()).isTrue();
+		assertThat(taskAppName).isEqualTo(((TaskAppNode) node).getName());
 	}
 
 	private void assertFlow(LabelledTaskNode node, String... expectedApps) {
-		assertTrue(node instanceof FlowNode);
+		assertInstanceOf(FlowNode.class, node);
 		FlowNode flow = (FlowNode) node;
 		List series = flow.getSeries();
-		assertEquals(expectedApps.length, series.size());
-		assertEquals(expectedApps.length, flow.getSeriesLength());
+		assertThat(series.size()).isEqualTo(expectedApps.length);
+		assertThat(flow.getSeriesLength()).isEqualTo(expectedApps.length);
 		for (int a = 0; a < expectedApps.length; a++) {
 			assertTaskApp(series.get(a), expectedApps[a]);
 		}
 	}
 
 	private void assertSplit(LabelledTaskNode node, String... expectedApps) {
-		assertTrue(node instanceof SplitNode);
+		assertInstanceOf(SplitNode.class, node);
 		SplitNode split = (SplitNode) node;
 		List series = split.getSeries();
-		assertEquals(expectedApps.length, series.size());
-		assertEquals(expectedApps.length, split.getSeriesLength());
+		assertThat(series.size()).isEqualTo(expectedApps.length);
+		assertThat(split.getSeriesLength()).isEqualTo(expectedApps.length);
 		for (int a = 0; a < expectedApps.length; a++) {
 			FlowNode f = (FlowNode) series.get(a);
-			assertEquals(1, f.getSeriesLength());
+			assertThat(f.getSeriesLength()).isEqualTo(1);
 			assertTaskApp(f.getSeriesElement(0), expectedApps[a]);
 		}
 	}
 
@@ -1612,11 +1577,11 @@ private ParseException checkForParseError(String dsl, DSLMessage msg, int pos, O
 			return null;
 		}
 		catch (ParseException e) {
-			assertEquals(msg, e.getMessageCode());
-			assertEquals(pos, e.getPosition());
+			assertThat(e.getMessageCode()).isEqualTo(msg);
+			assertThat(e.getPosition()).isEqualTo(pos);
 			if (inserts != null) {
 				for (int i = 0; i < inserts.length; i++) {
-					assertEquals(inserts[i], e.getInserts()[i]);
+					assertThat(e.getInserts()[i]).isEqualTo(inserts[i]);
 				}
 			}
 			return e;
@@ -1624,8 +1589,8 @@
 	}
 
 	private void assertApps(List taskApps, String... expectedTaskAppNames) {
-		assertEquals("Expected " + expectedTaskAppNames.length + " but was " + taskApps.size() + ": " + taskApps,
-				expectedTaskAppNames.length, taskApps.size());
+
+		assertThat(taskApps.size()).as("Expected " + expectedTaskAppNames.length + " but was " + taskApps.size() + ": " + taskApps).isEqualTo(expectedTaskAppNames.length);
 		Set set2 = new HashSet();
 		for (TaskApp taskApp : taskApps) {
 			StringBuilder s = new StringBuilder();
@@ -1652,13 +1617,13 @@ private void assertApps(List taskApps, String... expectedTaskAppNames)
 	private void checkDSLToGraphAndBackToDSL(String specification) {
 		TaskNode ctn = parse(specification);
 		Graph graph = ctn.toGraph();
-		assertEquals(specification, graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo(specification);
 	}
 
 	private void assertGraph(String expectedGraph, String dsl) {
 		TaskNode ctn = parse(dsl);
 		Graph graph = ctn.toGraph();
-		assertEquals(expectedGraph, graph.toVerboseString());
+		assertThat(graph.toVerboseString()).isEqualTo(expectedGraph);
 	}
 
 	private void assertTaskApps(String composedTaskName, String spec, String... expectedTaskApps) {
@@ -1673,7 +1638,7 @@ private void assertTaskApps(String composedTaskName, String spec, String... expe
 					s.append(":").append(arg.getKey()).append("=").append(arg.getValue());
 				}
 			}
-			assertEquals(s.toString(), expectedTaskApp);
+			assertThat(expectedTaskApp).isEqualTo(s.toString());
 		}
 	}
diff --git a/spring-cloud-dataflow-core/pom.xml b/spring-cloud-dataflow-core/pom.xml
index c344f9a5cf..2833956c6b 100644
--- a/spring-cloud-dataflow-core/pom.xml
+++ b/spring-cloud-dataflow-core/pom.xml
@@ -4,10 +4,18 @@
 		org.springframework.cloud
 		spring-cloud-dataflow-parent
-		2.9.2-SNAPSHOT
+		2.11.6-SNAPSHOT
+		../spring-cloud-dataflow-parent
 	spring-cloud-dataflow-core
+	spring-cloud-dataflow-core
+	Spring Cloud Data Flow Core
+	jar
+
+		true
+		3.4.1
+
 			org.springframework.boot
@@ -20,9 +28,19 @@
+
+			org.springframework.cloud
+			spring-cloud-task-batch
+
 			org.springframework.cloud
 			spring-cloud-dataflow-core-dsl
+			${project.version}
+
+
+			org.springframework.cloud
+			spring-cloud-dataflow-schema-core
+			${project.version}
 			org.springframework.cloud
@@ -32,6 +50,10 @@
 			com.fasterxml.jackson.core
 			jackson-annotations
+
+			com.fasterxml.jackson.core
+			jackson-databind
+
 			org.springframework.data
 			spring-data-keyvalue
@@ -60,14 +82,46 @@
 			javax.validation
 			validation-api
+
+			org.springframework.hateoas
+			spring-hateoas
+
 			org.springframework.boot
 			spring-boot-starter-test
 			test
-
-			org.springframework.hateoas
-			spring-hateoas
-
+
+
+
+				org.apache.maven.plugins
+				maven-javadoc-plugin
+				${maven-javadoc-plugin.version}
+
+
+						javadoc
+
+							jar
+
+						package
+
+
+
+
+				org.apache.maven.plugins
+				maven-source-plugin
+				3.3.0
+
+
+						source
+
+							jar
+
+						package
+
+
+
+
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java
index 6aab9e506d..18532d1408 100644
--- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2016-2020 the original author or authors.
+ * Copyright 2016-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -21,10 +21,13 @@
 import java.util.Objects;
 
 import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
 import javax.persistence.Lob;
 import javax.persistence.Table;
 import javax.persistence.Transient;
 
+import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
 import org.springframework.util.Assert;
 
 /**
@@ -35,6 +38,7 @@
 * @author Christian Tzolov
 * @author Vinicius Carvalho
 * @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
 */
 @Entity
 @Table(name = "AppRegistration")
@@ -74,6 +78,12 @@ public class AppRegistration extends AbstractEntity implements Comparable
 	versions;
@@ -124,6 +134,21 @@ public AppRegistration(String name, ApplicationType type, String version, URI ur
 		this.metadataUri = metadataUri;
 	}
 
+	/**
+	 * Construct an {@code AppRegistration} object.
+	 *
+	 * @param name app name
+	 * @param type app type
+	 * @param version app version
+	 * @param uri URI for the app resource
+	 * @param metadataUri URI for the app metadata resource
+	 * @param bootVersion The bootVersion of the application.
+	 */
+	public AppRegistration(String name, ApplicationType type, String version, URI uri, URI metadataUri, AppBootSchemaVersion bootVersion) {
+		this(name,type,version,uri,metadataUri);
+		this.bootVersion = bootVersion;
+	}
+
 	/**
 	 * @return the name of the app
 	 */
@@ -176,6 +201,14 @@ public void setMetadataUri(URI metadataUri) {
 		this.metadataUri = metadataUri;
 	}
 
+	public AppBootSchemaVersion getBootVersion() {
+		return bootVersion == null ? AppBootSchemaVersion.defaultVersion() : bootVersion;
+	}
+
+	public void setBootVersion(AppBootSchemaVersion bootVersion) {
+		this.bootVersion = bootVersion;
+	}
+
 	public Boolean isDefaultVersion() {
 		return this.defaultVersion;
 	}
@@ -196,7 +229,8 @@ public void setVersions(HashSet versions) {
 	public String toString() {
 		return "AppRegistration{" + "name='" + this.getName() + '\'' + ", type='" + this.getType()
 				+ '\'' + ", version='" + this.getVersion() + '\'' + ", uri=" + this.getUri()
-				+ ", metadataUri=" + this.getMetadataUri() + '}';
+				+ ", metadataUri=" + this.getMetadataUri()
+				+ ", bootVersion='" + this.getBootVersion().getBootVersion() + '}';
 	}
 
 	@Override
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/DataFlowAppDefinition.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/DataFlowAppDefinition.java
index dc604a4002..acbeec7d6b 100644
--- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/DataFlowAppDefinition.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/DataFlowAppDefinition.java
@@ -109,6 +109,8 @@ public ApplicationType getApplicationType() {
 		return applicationType;
 	}
 
+	public AppDefinition getAppDefinition() { return appDefinition; }
+
 	@Override
 	public boolean equals(Object obj) {
 		if (this == obj)
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java
new file mode 100644
index 0000000000..6fd87b79c1
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java
@@ -0,0 +1,59 @@
+package org.springframework.cloud.dataflow.core;
+
+import java.util.Objects;
+
+public class LaunchResponse {
+	private long executionId;
+
+	private String schemaTarget;
+
+	public LaunchResponse() {
+	}
+
+	public LaunchResponse(long executionId, String schemaTarget) {
+		this.executionId = executionId;
+		this.schemaTarget = schemaTarget;
+	}
+
+	public long getExecutionId() {
+		return executionId;
+	}
+
+	public void setExecutionId(long executionId) {
+		this.executionId = executionId;
+	}
+
+	public String getSchemaTarget() {
+		return schemaTarget;
+	}
+
+	public void setSchemaTarget(String schemaTarget) {
+		this.schemaTarget = schemaTarget;
+	}
+
+	@Override
+	public boolean equals(Object o) {
+		if (this == o) return true;
+		if (o == null || getClass() != o.getClass()) return false;
+
+		LaunchResponse that = (LaunchResponse) o;
+
+		if (executionId != that.executionId) return false;
+		return Objects.equals(schemaTarget, that.schemaTarget);
+	}
+
+	@Override
+	public int hashCode() {
+		int result = (int) (executionId ^ (executionId >>> 32));
+		result = 31 * result + (schemaTarget != null ? schemaTarget.hashCode() : 0);
+		return result;
+	}
+
+	@Override
+	public String toString() {
+		return "LaunchResponse{" +
+				"taskId=" + executionId +
+				", schemaTarget='" + schemaTarget + '\'' +
+				'}';
+	}
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionService.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionService.java
index 535ce8cb2c..ae55d39305 100644
--- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionService.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionService.java
@@ -57,7 +57,9 @@ public interface StreamDefinitionService {
 	/**
 	 * Return the updated stream DSL for the given stream definition with the associated properties.
 	 *
+	 * @param originalDslText Text of the original DSL.
 	 * @param streamAppDefinitions the linked list of {@link StreamAppDefinition}s associated with the stream with some of app properties modified
+	 *
 	 * @return the updated stream DSL
 	 */
 	String constructDsl(String originalDslText, LinkedList streamAppDefinitions);
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtils.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtils.java
index 814e2090e8..9d33cac055 100644
--- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtils.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtils.java
@@ -49,6 +49,7 @@ public static LinkedList sanitizeStreamAppDefinitions(Link
 	 * stream. The application definitions are returned in reverse order; i.e. the sink is
 	 * returned first followed by the processors in reverse order followed by the source.
 	 *
+	 * @param streamAppDefinitions List of stream definitions
 	 * @return iterator that iterates over the application definitions in deployment order
 	 */
 	public static Iterator getDeploymentOrderIterator(LinkedList streamAppDefinitions) {
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java
similarity index 75%
rename from spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java
rename to spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java
index 40f3b6c05b..f207ac3df5 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2016 the original author or authors.
+ * Copyright 2016-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -14,8 +14,9 @@
 * limitations under the License.
 */
-package org.springframework.cloud.dataflow.server.repository.support;
+package org.springframework.cloud.dataflow.core.database.support;
 
+import java.sql.DatabaseMetaData;
 import java.util.HashMap;
 import java.util.Map;
@@ -37,7 +38,9 @@ public enum DatabaseType {
 	HSQL("HSQL Database Engine"),
 	H2("H2"),
 	ORACLE("Oracle"),
+	MARIADB("MariaDB"),
 	MYSQL("MySQL"),
+	POSTGRES("PostgreSQL"),
 	SQLSERVER("Microsoft SQL Server"),
 	DB2("DB2");
@@ -73,12 +76,10 @@ public static DatabaseType fromMetaData(DataSource dataSource) throws MetaDataAc
 					.toString();
 			if (!databaseProductVersion.startsWith("SQL")) {
 				databaseProductName = "DB2ZOS";
-			}
-			else {
+			} else {
 				databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName);
 			}
-		}
-		else {
+		} else if(!databaseProductName.equals("MariaDB")) {
 			databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName);
 		}
 		return fromProductName(databaseProductName);
@@ -100,6 +101,24 @@ public static DatabaseType fromProductName(String productName) {
 		}
 	}
 
+	/**
+	 * Determines if the Database that the datasource refers to supports the {@code ROW_NUMBER()} SQL function.
+	 * @param dataSource the datasource pointing to the DB in question
+	 * @return whether the database supports the SQL {@code ROW_NUMBER()} function
+	 * @throws MetaDataAccessException if error occurs
+	 */
+	public static boolean supportsRowNumberFunction(DataSource dataSource) throws MetaDataAccessException {
+		DatabaseType databaseType = DatabaseType.fromMetaData(dataSource);
+		if (databaseType == DatabaseType.H2 || databaseType == DatabaseType.HSQL) {
+			return false;
+		}
+		if (databaseType != DatabaseType.MYSQL) {
+			return true;
+		}
+		int majorVersion = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseMajorVersion);
+		return (majorVersion >= 8);
+	}
+
 	private String getProductName() {
 		return productName;
 	}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java
new file mode 100644
index 0000000000..87f8f26667
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.core.database.support;
+
+public enum IncrementerType {
+	DEFAULT,
+	TABLE,
+	SEQUENCE
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java
new file mode 100644
index 0000000000..ab6ee1cdc8
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java
@@ -0,0 +1,18 @@
+package org.springframework.cloud.dataflow.core.database.support;
+
+import javax.sql.DataSource;
+
+import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer;
+
+public class MariaDBSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer {
+	public MariaDBSequenceMaxValueIncrementer() {
+	}
+
+	public MariaDBSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) {
+		super(dataSource, incrementerName);
+	}
+
+	protected String getSequenceQuery() {
+		return "select next value for " + this.getIncrementerName();
+	}
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java
new file mode 100644
index 0000000000..532fb881e9
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java
@@ -0,0 +1,88 @@
+package org.springframework.cloud.dataflow.core.database.support;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import javax.sql.DataSource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
+import org.springframework.jdbc.support.MetaDataAccessException;
+import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;
+
+public class MultiSchemaIncrementerFactory extends DefaultDataFieldMaxValueIncrementerFactory {
+	private final static Logger logger = LoggerFactory.getLogger(MultiSchemaIncrementerFactory.class);
+
+	private final DataSource dataSource;
+
+	public MultiSchemaIncrementerFactory(DataSource dataSource) {
+		super(dataSource);
+		this.dataSource = dataSource;
+	}
+
+	@Override
+	public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) {
+		DatabaseType databaseType;
+		try {
+			databaseType = DatabaseType.fromMetaData(this.dataSource);
+		} catch (MetaDataAccessException e) {
+			throw new IllegalStateException(e);
+		}
+		if (databaseType != null) {
+			IncrementerType type = getIncrementerType(databaseType, incrementerName);
+			if (type == IncrementerType.SEQUENCE) {
+				switch (databaseType) {
+					case SQLSERVER:
+						return new SqlServerSequenceMaxValueIncrementer(this.dataSource, incrementerName);
+					case MARIADB:
+						return new MariaDBSequenceMaxValueIncrementer(this.dataSource, incrementerName);
+				}
+			}
+		}
+		return super.getIncrementer(incrementerType, incrementerName);
+	}
+
+	private IncrementerType getIncrementerType(DatabaseType databaseType, String incrementerName) {
+
+		try (Connection connection = this.dataSource.getConnection()) {
+			if(databaseType == DatabaseType.SQLSERVER) {
+				try(Statement statement = connection.createStatement()) {
+					try(ResultSet sequences = statement.executeQuery("SELECT name FROM sys.sequences")) {
+						while (sequences.next()) {
+							String sequenceName = sequences.getString(1);
+							logger.debug("Sequence:{}", sequenceName);
+							if(sequenceName.equalsIgnoreCase(incrementerName)) {
+								return IncrementerType.SEQUENCE;
+							}
+						}
+					}
+				} catch (Throwable x) {
+					logger.warn("Ignoring error:" + x);
+				}
+			}
+			DatabaseMetaData metaData = connection.getMetaData();
+			String[] types = {"TABLE", "SEQUENCE"};
+			try (ResultSet tables = metaData.getTables(null, null, "%", types)) {
+				while (tables.next()) {
+					String tableName = tables.getString("TABLE_NAME");
+					if (tableName.equalsIgnoreCase(incrementerName)) {
+						String tableType = tables.getString("TABLE_TYPE");
+						logger.debug("Found Table:{}:{}", incrementerName, tableType);
+						if (tableType != null && tableType.toUpperCase().contains("SEQUENCE")) {
+							return IncrementerType.SEQUENCE;
+						}
+						return IncrementerType.TABLE;
+					}
+				}
+			}
+		} catch (SQLException sqe) {
+			logger.warn(sqe.getMessage(), sqe);
+		}
+		return IncrementerType.DEFAULT;
+	}
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java
new file mode 100644
index 0000000000..fdcedb1627
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java
@@ -0,0 +1,35 @@
+package org.springframework.cloud.dataflow.core.database.support;
+
+import javax.sql.DataSource;
+
+import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory;
+import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao;
+import org.springframework.cloud.task.repository.dao.TaskExecutionDao;
+import org.springframework.cloud.task.repository.support.DatabaseType;
+import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
+import org.springframework.jdbc.support.MetaDataAccessException;
+
+public class MultiSchemaTaskExecutionDaoFactoryBean extends TaskExecutionDaoFactoryBean {
+	private final DataSource dataSource;
+	private final String tablePrefix;
+	public MultiSchemaTaskExecutionDaoFactoryBean(DataSource dataSource, String tablePrefix) {
+		super(dataSource, tablePrefix);
+		this.dataSource = dataSource;
+		this.tablePrefix = tablePrefix;
+	}
+
+	@Override
+	public TaskExecutionDao getObject() throws Exception {
+		DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource);
+		JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(dataSource, this.tablePrefix);
+		String databaseType;
+		try {
+			databaseType = DatabaseType.fromMetaData(dataSource).name();
+		}
+		catch (MetaDataAccessException e) {
+			throw new IllegalStateException(e);
+		}
+		dao.setTaskIncrementer(incrementerFactory.getIncrementer(databaseType, this.tablePrefix + "SEQ"));
+		return dao;
+	}
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java
new file mode 100644
index 0000000000..e301274a9c
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java
@@ -0,0 +1,18 @@
+package org.springframework.cloud.dataflow.core.database.support;
+
+import javax.sql.DataSource;
+
+import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer;
+
+public class SqlServerSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer {
+	public SqlServerSequenceMaxValueIncrementer() {
+	}
+
+	public SqlServerSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) {
+		super(dataSource, incrementerName);
+	}
+
+	protected String getSequenceQuery() {
+		return "select next value for " + this.getIncrementerName();
+	}
+}
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java
index 908e8ebab2..c1f2251307 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java
@@ -18,7 +18,7 @@
 
 import java.net.URI;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
 
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java
index 9021c98ae5..1d28453ada 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java
@@ -19,9 +19,10 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
 * @author Christian Tzolov
@@ -34,7 +35,7 @@ public class ArgumentSanitizerTest {
 	private static final String[] keys = { "password", "secret", "key", "token", ".*credentials.*", "vcap_services", "url" };
 
-	@Before
+	@BeforeEach
 	public void before() {
 		sanitizer = new ArgumentSanitizer();
 	}
@@ -42,8 +43,8 @@ public void before() {
 	@Test
 	public void testSanitizeProperties() {
 		for (String key : keys) {
-			Assert.assertEquals("--" + key + "=******", sanitizer.sanitize("--" + key + "=foo"));
-			Assert.assertEquals("******", sanitizer.sanitize(key, "bar"));
+			assertEquals("--" + key + "=******", sanitizer.sanitize("--" + key + "=foo"));
+			assertEquals("******", sanitizer.sanitize(key, "bar"));
 		}
 	}
@@ -57,11 +58,11 @@ public void testSanitizeArguments() {
 
 		final List sanitizedArguments = sanitizer.sanitizeArguments(arguments);
 
-		Assert.assertEquals(keys.length, sanitizedArguments.size());
+		assertEquals(keys.length, sanitizedArguments.size());
 
 		int order = 0;
 		for(String sanitizedString : sanitizedArguments) {
-			Assert.assertEquals("--" + keys[order] + "=******", sanitizedString);
+			assertEquals("--" + keys[order] + "=******", sanitizedString);
 			order++;
 		}
 	}
@@ -69,26 +70,26 @@ public void testSanitizeArguments() {
 
 	@Test
 	public void testMultipartProperty() {
-		Assert.assertEquals("--password=******", sanitizer.sanitize("--password=boza"));
-		Assert.assertEquals("--one.two.password=******", sanitizer.sanitize("--one.two.password=boza"));
-		Assert.assertEquals("--one_two_password=******", sanitizer.sanitize("--one_two_password=boza"));
+		assertEquals("--password=******", sanitizer.sanitize("--password=boza"));
+		assertEquals("--one.two.password=******", sanitizer.sanitize("--one.two.password=boza"));
+		assertEquals("--one_two_password=******", sanitizer.sanitize("--one_two_password=boza"));
 	}
 
 //	@Test
 //	public void testHierarchicalPropertyNames() {
-//		Assert.assertEquals("time --password='******' | log",
+//		assertEquals("time --password='******' | log",
 //				sanitizer.(new StreamDefinition("stream", "time --password=bar | log")));
 //	}
 //
 //	@Test
 //	public void testStreamPropertyOrder() {
-//		Assert.assertEquals("time --some.password='******' --another-secret='******' | log",
+//		assertEquals("time --some.password='******' --another-secret='******' | log",
 //				sanitizer.sanitizeStream(new StreamDefinition("stream", "time --some.password=foobar --another-secret=kenny | log")));
 //	}
 //
 //	@Test
 //	public void testStreamMatcherWithHyphenDotChar() {
-//		Assert.assertEquals("twitterstream --twitter.credentials.access-token-secret='******' "
+//		assertEquals("twitterstream --twitter.credentials.access-token-secret='******' "
 //				+ "--twitter.credentials.access-token='******' --twitter.credentials.consumer-secret='******' "
 //				+ "--twitter.credentials.consumer-key='******' | "
 //				+ "filter --expression=#jsonPath(payload,'$.lang')=='en' | "
@@ -105,6 +106,6 @@ public void testMultipartProperty() {
 //	@Test
 //	public void testStreamSanitizeOriginalDsl() {
 //		StreamDefinition streamDefinition = new StreamDefinition("test", "time --password='******' | log --password='******'", "time --password='******' | log");
-//		Assert.assertEquals("time --password='******' | log", sanitizer.sanitizeOriginalStreamDsl(streamDefinition));
+//		assertEquals("time --password='******' | log", sanitizer.sanitizeOriginalStreamDsl(streamDefinition));
 //	}
 }
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java
index 850f610d30..d283ed2eb6 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java
@@ -16,9 +16,11 @@
 
 package org.springframework.cloud.dataflow.core;
 
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
 * @author Patrick Peralta
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java
index 74eb7b0c42..64723515a6 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java
@@ -20,10 +20,11 @@
 import java.util.LinkedList;
 import java.util.List;
 
-import org.junit.Ignore;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
 * @author Christian Tzolov
@@ -38,7 +39,7 @@ public void testStreamCreation() {
 		reverseDslTest("time | log", 2);
 	}
 
-	@Ignore
+	@Disabled
 	@Test
 	public void quotesInParams() {
 		reverseDslTest("foo --bar='payload.matches(''hello'')' | file", 2);
@@ -59,13 +60,13 @@ public void testBindings3Apps() {
 		reverseDslTest("time | filter | log", 3);
 	}
 
-	@Ignore
+	@Disabled
 	@Test
 	public void testXD2416_1() {
 		reverseDslTest("http | transform --expression='payload.replace(\"abc\", \"\")' | log", 3);
 	}
 
-	@Ignore
+	@Disabled
 	@Test
 	public void testXD2416_2() {
 		reverseDslTest("http | transform --expression='payload.replace(\"abc\", '''')' | log", 3);
@@ -213,13 +214,13 @@ public void autoQuotesOnSemicolonProperties() {
 		streamDefinition = new StreamDefinition("stream2", "jdbc-mssql --cron='/10 * * * * *' " +
 				"--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " +
 				"OUTPUT Inserted.* WHERE assurance_flag IS NULL' " +
-				"--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " +
+				"--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " +
 				"cust-processor | router --default-output-channel=out");
 
 		assertEquals("jdbc-mssql --cron='/10 * * * * *' " +
 				"--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " +
 				"OUTPUT Inserted.* WHERE assurance_flag IS NULL' " +
-				"--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " +
+				"--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " +
 				"cust-processor | router --default-output-channel=out",
 				this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition)));
@@ -231,13 +232,13 @@ public void autoQuotesOnStarProperties() {
 		StreamDefinition streamDefinition = new StreamDefinition("stream2", "jdbc-mssql --cron='/10 * * * * *' " +
 				"--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " +
 				"OUTPUT Inserted.* WHERE assurance_flag IS NULL' " +
-				"--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " +
+				"--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " +
 				"cust-processor | router --default-output-channel=out");
 
 		assertEquals("jdbc-mssql --cron='/10 * * * * *' " +
 				"--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " +
 				"OUTPUT Inserted.* WHERE assurance_flag IS NULL' " +
-				"--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " +
+				"--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " +
 				"cust-processor | router --default-output-channel=out",
 				this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition)));
 	}
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java
index 5c7e7cb71b..bb543ab334 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java
@@ -19,17 +19,14 @@
 import java.util.List;
 import java.util.Map;
 
-import org.junit.Test;
+
+import org.junit.jupiter.api.Test;
 
 import org.springframework.cloud.dataflow.core.dsl.ParseException;
 import org.springframework.cloud.dataflow.core.dsl.StreamParser;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /**
 * @author Mark Fisher
@@ -198,17 +195,15 @@ public void destinationsForbiddenInComposedApps() {
 			new StreamDefinition("test", ":foo > boot");
 		}
 		catch (ParseException expected) {
-			assertThat(expected.getMessage(),
-					containsString("A destination is not supported in this kind of definition"));
-			assertThat(expected.getPosition(), is(0));
+			assertThat(expected.getMessage()).contains("A destination is not supported in this kind of definition");
+			assertThat(expected.getPosition()).isEqualTo(0);
 		}
 		try {
 			new StreamDefinition("test", "bart | goo > :foo");
 		}
 		catch (ParseException expected) {
-			assertThat(expected.getMessage(),
-					containsString("A destination is not supported in this kind of definition"));
-			assertThat(expected.getPosition(), is(13));
+			assertThat(expected.getMessage()).contains("A destination is not supported in this kind of definition");
+			assertThat(expected.getPosition()).isEqualTo(13);
 		}
 	}
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java
index e468f0f0f1..9be2749ef0 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java
@@ -20,11 +20,11 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 
 /**
 * @author Thomas Risberg
@@ -61,7 +61,8 @@ public void testPackageProtectedConstructor() {
 	@Test
 	public void testBuilder() {
-		TaskDefinition definition = new TaskDefinition.TaskDefinitionBuilder()
+		new TaskDefinition.TaskDefinitionBuilder();
+		TaskDefinition definition = TaskDefinition.TaskDefinitionBuilder
 				.from(new TaskDefinition("test", "timestamp"))
 				.build();
 		assertEquals("test", definition.getName());
@@ -75,8 +76,8 @@ public void testEquality() {
 		TaskDefinition definitionOne = new TaskDefinition("test", "timestamp");
 		TaskDefinition definitionTwo = new TaskDefinition("test", "timestamp");
 
-		assertTrue("TaskDefinitions were expected to be equal.", definitionOne.equals(definitionTwo));
-		assertTrue("TaskDefinitions were expected to be equal.", definitionOne.equals(definitionOne));
+		assertEquals(definitionOne, definitionTwo, "TaskDefinitions were expected to be equal.");
+		assertEquals(definitionOne, definitionOne, "TaskDefinitions were expected to be equal.");
 	}
 
@@ -85,9 +86,9 @@ public void testInequality() {
 		TaskDefinition definitionOne = new TaskDefinition("test", "timestamp");
 		TaskDefinition definitionFoo = new TaskDefinition("test", "foo");
 
-		assertFalse("TaskDefinitions were not expected to be equal.", definitionOne.equals(definitionFoo));
-		assertFalse("TaskDefinitions were not expected to be equal.", definitionOne.equals(null));
-		assertFalse("TaskDefinitions were not expected to be equal.", definitionOne.equals("HI"));
+		assertNotEquals(definitionOne, definitionFoo, "TaskDefinitions were not expected to be equal.");
+		assertNotEquals(null, definitionOne, "TaskDefinitions were not expected to be equal.");
+		assertNotEquals("HI", definitionOne, "TaskDefinitions were not expected to be equal.");
 	}
 
 	@Test
 	public void testHashCode() {
@@ -95,8 +96,8 @@
 		TaskDefinition definitionTwo = new TaskDefinition("test", "timestamp");
 		TaskDefinition definitionFoo = new TaskDefinition("test", "foo");
 
-		assertTrue("TaskDefinitions' hashcodes were expected to be equal.", definitionOne.hashCode() == definitionTwo.hashCode());
-		assertFalse("TaskDefinitions' hashcodes were not expected to be equal.", definitionOne.hashCode() == definitionFoo.hashCode());
+		assertEquals(definitionOne.hashCode(), definitionTwo.hashCode(), "TaskDefinitions' hashcodes were expected to be equal.");
+		assertNotEquals(definitionOne.hashCode(), definitionFoo.hashCode(), "TaskDefinitions' hashcodes were not expected to be equal.");
 	}
 
 	@Test
@@ -114,7 +115,8 @@ public void testDefinitionWithArguments() {
 	public void testBuilderSetProperties() {
 		Map properties = new HashMap<>();
 		properties.put("foo", "bar");
-		TaskDefinition definition = new TaskDefinition.TaskDefinitionBuilder()
+		new TaskDefinition.TaskDefinitionBuilder();
+		TaskDefinition definition = TaskDefinition.TaskDefinitionBuilder
 				.from(new TaskDefinition("test", "timestamp"))
 				.setProperties(properties)
 				.build();
diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java
index 849c36accb..a1ac8391fd 100644
--- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java
+++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java
@@ -17,11 +17,13 @@
 package org.springframework.cloud.dataflow.core;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
 * @author Christian Tzolov
@@ -38,8 +40,8 @@ public void testTaskDsl() {
 	@Test
 	public void testExclusionOfDataFlowAddedProperties() {
 
-		List dataFlowAddedProperties = Arrays.asList(
-				TaskDefinition.SPRING_CLOUD_TASK_NAME);
+		List dataFlowAddedProperties = Collections.singletonList(
+				TaskDefinition.SPRING_CLOUD_TASK_NAME);
 
 		for (String key : dataFlowAddedProperties) {
 			String dslText = "foo --" + key + "=boza";
@@ -76,20 +78,22 @@ public void autoQuotesOnStarProperties() {
 		TaskDefinition taskDefinition = new TaskDefinition("fooTask", "jdbc-mssql --cron='/10 * * * * *' " +
 				"--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " +
 				"OUTPUT Inserted.* WHERE assurance_flag IS NULL' " +
-				"--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****'");
+				"--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****'");
 
 		assertEquals("jdbc-mssql --cron='/10 * * * * *' " +
 				"--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " +
 				"OUTPUT Inserted.* WHERE assurance_flag IS NULL' " +
-				"--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****'",
+				"--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****'",
 				new TaskDefinitionToDslConverter().toDsl(taskDefinition));
 	}
 
-	@Test(expected = IllegalArgumentException.class)
+	@Test
 	public void compositeTaskDsl() {
-		TaskDefinition taskDefinition = new TaskDefinition("composedTaskName", "foo && bar");
-		new TaskDefinitionToDslConverter().toDsl(taskDefinition);
+		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> {
+			TaskDefinition taskDefinition = new TaskDefinition("composedTaskName", "foo && bar");
+			new TaskDefinitionToDslConverter().toDsl(taskDefinition);
+		});
 	}
 }
diff --git a/spring-cloud-dataflow-dependencies/pom.xml b/spring-cloud-dataflow-dependencies/pom.xml
index a6f508930c..2a8b76c946 100644
--- a/spring-cloud-dataflow-dependencies/pom.xml
+++ b/spring-cloud-dataflow-dependencies/pom.xml
@@ -4,11 +4,11 @@
 		spring-cloud-dataflow-dependencies-parent
 		org.springframework.cloud
-		2.9.2-SNAPSHOT
-
+		2.11.6-SNAPSHOT
+		../spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent
 	spring-cloud-dataflow-dependencies
-	2.9.2-SNAPSHOT
+	2.11.6-SNAPSHOT
 	pom
 	spring-cloud-dataflow-dependencies
 	Spring Cloud Data Flow Dependencies
 	BOM designed to support consumption of Spring Cloud Data Flow from
@@ -19,108 +19,118 @@
 			org.springframework.cloud
 			spring-cloud-dataflow-shell
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-shell-core
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-completion
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-core-dsl
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-core
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-rest-client
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-configuration-metadata
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-rest-resource
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-registry
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-container-registry
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-composed-task-runner
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-server-core
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-server
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-autoconfigure
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-starter-dataflow-server
-			2.9.2-SNAPSHOT
-
-			org.springframework.cloud
-			spring-cloud-starter-dataflow-ui
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-starter-dataflow-server
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			test-jar
 			org.springframework.cloud
 			spring-cloud-dataflow-audit
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-platform-kubernetes
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-platform-cloudfoundry
-			2.9.2-SNAPSHOT
+			${dataflow.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-test
-			2.9.2-SNAPSHOT
+			${dataflow.version}
+
+			org.springframework.cloud
+			spring-cloud-common-security-config-core
+			${dataflow.version}
+
+
+			org.springframework.cloud
+			spring-cloud-common-security-config-web
+			${dataflow.version}
+
+
+			org.springframework.cloud
+			spring-cloud-starter-common-security-config-web
+			${dataflow.version}
@@ -129,28 +139,36 @@
 			spring-docs
-			scp://static.springframework.org/var/www/domains/springframework.org/static/htdocs/spring-cloud/docs/${project.artifactId}/${project.version}
+			scp://static.springframework.org/var/www/domains/springframework.org/static/htdocs/spring-cloud/docs/${project.artifactId}/${dataflow.version}
 			repo.spring.io
 			Spring Release Repository
-			https://repo.spring.io/libs-release-local
+			https://repo.spring.io/libs-staging-local
 			repo.spring.io
 			Spring Snapshot Repository
-			https://repo.spring.io/libs-snapshot-local
+			https://repo.spring.io/snapshot
 		spring
+
+			maven-central
+			Maven Central
+			https://repo.maven.apache.org/maven2
+
+				false
+
+
 			spring-snapshots
 			Spring Snapshots
-			https://repo.spring.io/libs-snapshot-local
+			https://repo.spring.io/snapshot
 				true
@@ -158,25 +176,25 @@
 			spring-milestones
 			Spring Milestones
-			https://repo.spring.io/libs-milestone-local
+			https://repo.spring.io/milestone
 				false
-
-			spring-releases
-			Spring Releases
-			https://repo.spring.io/release
+
+
+
+			maven-central
+			Maven Central
+			https://repo.maven.apache.org/maven2
 				false
-
-
-
+
 			spring-snapshots
 			Spring Snapshots
-			https://repo.spring.io/libs-snapshot-local
+			https://repo.spring.io/snapshot
 				true
@@ -184,7 +202,7 @@
 			spring-milestones
 			Spring Milestones
-			https://repo.spring.io/libs-milestone-local
+			https://repo.spring.io/milestone
 				false
diff --git a/spring-cloud-dataflow-docs/pom.xml b/spring-cloud-dataflow-docs/pom.xml
index 8449163692..8f91107e26 100644
--- a/spring-cloud-dataflow-docs/pom.xml
+++ b/spring-cloud-dataflow-docs/pom.xml
@@ -4,69 +4,91 @@
 		org.springframework.cloud
 		spring-cloud-dataflow-parent
-		2.9.2-SNAPSHOT
+		2.11.6-SNAPSHOT
+		../spring-cloud-dataflow-parent
 	spring-cloud-dataflow-docs
 	Spring Cloud Data Flow Docs
 	Spring Cloud Data Flow Docs
+	jar
 		${basedir}/..
-		0.2.1.RELEASE
+		0.2.5
+		3.4.1
+		${basedir}/..
 			org.springframework.cloud
 			spring-cloud-dataflow-configuration-metadata
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-core
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-registry
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-rest-resource
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-server-core
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-server
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-rest-client
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-shell-core
+			${project.version}
 			org.springframework.cloud
 			spring-cloud-dataflow-completion
-
-			io.spring.docresources
-			spring-doc-resources
-			${docs.resources.version}
-			zip
-			true
+			${project.version}
+
+
+
+				org.apache.maven.plugins
+				maven-surefire-plugin
+
+					false
+
+
+
+			full
+
+				org.apache.maven.plugins
+				maven-jar-plugin
+				3.3.0
+
 				org.apache.maven.plugins
 				maven-javadoc-plugin
-				2.10.1
+				${maven-javadoc-plugin.version}
 						attach-javadocs
@@ -84,7 +106,7 @@
 						${basedir}/src/main/javadoc/spring-javadoc.css
-							https://docs.spring.io/spring-framework/docs/${spring.version}/javadoc-api/
+							https://docs.spring.io/spring-framework/docs/${spring-framework.version}/javadoc-api/
 							https://docs.spring.io/spring-shell/docs/current/api/
@@ -92,29 +114,10 @@
-
-				org.apache.maven.plugins
-				maven-dependency-plugin
-
-
-						unpack-doc-resources
-
-							unpack-dependencies
-
-						generate-resources
-
-								io.spring.docresources
-								spring-doc-resources
-								zip
-								true
-								${project.build.directory}/refdocs/
-
-
-
-
 				org.apache.maven.plugins
 				maven-resources-plugin
+				${maven-resources-plugin.version}
 						copy-asciidoc-resources
@@ -137,7 +140,14 @@
 				org.asciidoctor
 				asciidoctor-maven-plugin
-				1.5.6
+				2.2.4
+
+
+						io.spring.asciidoctor.backends
+						spring-asciidoctor-backends
+						0.0.5
+
+
 					${project.build.directory}/refdocs/
 					${project.build.directory}/generated-docs
@@ -170,11 +180,11 @@
 							process-asciidoc
-							html5
+							spring-html
 							highlight.js
 							book
-							// these attributes are required to use the doc resources
+							shared
 							css/
 							spring.css
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc
index d2be3915bb..4e9c99412c 100644
--- 
a/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc @@ -15,7 +15,44 @@ In fact, the Spring Cloud Data Flow shell is a first-class consumer of that API. TIP: If you plan to use the REST API with Java, you should consider using the provided Java client (`DataflowTemplate`) that uses the REST API internally. +[[api-guide-version]] +=== HTTP Version +Spring Cloud Data Flow establishes a RESTful API version that is updated when there is a breaking change to the API. +The API version can be seen at the end of the home page of Spring Cloud Data Flow as shown in the example below: + +==== +[source,json] +---- +{ + "_links": { + "dashboard": { "href" : "http://localhost:9393/dashboard" }, + ... + }, + "api.revision":15 +} +---- +==== +The table below shows the SCDF Release version and its current RESTful API version. + +|=== +| SCDF Version | API Version + +| 2.11.x +| 14 + +| 2.10.x +| 14 + +| 2.9.x +| 14 + +| 2.8.x +| 14 + +| 2.7.x +| 14 +|=== [[api-guide-overview-http-verbs]] === HTTP verbs @@ -109,6 +146,7 @@ The API includes the following resources: * <> * <> +* <> * <> * <> * <> @@ -587,6 +625,67 @@ include::{snippets}/app-registry-documentation/unregistering-all-applications/cu include::{snippets}/app-registry-documentation/unregistering-all-applications/http-response.adoc[] +[[api-guide-resources-schema-info]] +=== Schema Information + +The schema information endpoint provides information about the supported Spring Boot schema versions for Task and Batch applications and the available Schema Targets. + +The following topics provide more details: + +* <> +* <> + +[[api-guide-resources-schema-info-versions]] +==== List All Schema Versions + +The schema endpoint provides for listing supported Spring Boot versions. + +The following topics provide more details: + +* <> +* <> +* <> + +[[api-guide-resources-schema-info-versions-request-structure]] +===== Request Structure + +include::{snippets}/schema-documentation/schema-versions/http-request.adoc[] + +[[api-guide-resources-schema-info-versions-example-request]] +===== Example Request + +include::{snippets}/schema-documentation/schema-versions/curl-request.adoc[] + +[[api-guide-resources-schema-info-versions-response-structure]] +===== Response Structure + +include::{snippets}/schema-documentation/schema-versions/http-response.adoc[] + +[[api-guide-resources-schema-info-targets]] +==== List All Schema Targets + +The schema endpoint provides for listing supported Schema Targets. + +The following topics provide more details: + +* <> +* <> +* <> + +[[api-guide-resources-schema-info-targets-request-structure]] +===== Request Structure + +include::{snippets}/schema-documentation/schema-targets/http-request.adoc[] + +[[api-guide-resources-schema-info-targets-example-request]] +===== Example Request + +include::{snippets}/schema-documentation/schema-targets/curl-request.adoc[] + +[[api-guide-resources-schema-info-targets-response-structure]] +===== Response Structure + +include::{snippets}/schema-documentation/schema-targets/http-response.adoc[] [[api-guide-resources-audit-records]] === Audit Records @@ -1763,6 +1862,8 @@ The following topics provide more details: * <> * <> * <> +* <> +* <> * <> * <> * <> @@ -1770,9 +1871,10 @@ The following topics provide more details: [[api-guide-resources-task-executions-launching]] -==== Launching a Task +==== Launching a Task (Legacy) + +Launching a task is done by requesting the creation of a new task execution. 
This endpoint will fail if the task is registered as a Spring Boot 3 application. -Launching a task is done by requesting the creation of a new task execution. The following topics provide more details: * <> @@ -1780,8 +1882,6 @@ The following topics provide more details: * <> * <> - - [[api-guide-resources-task-executions-launching-request-structure]] ===== Request Structure @@ -1807,6 +1907,45 @@ include::{snippets}/task-executions-documentation/launch-task/curl-request.adoc[ include::{snippets}/task-executions-documentation/launch-task/http-response.adoc[] +[[api-guide-resources-task-executions-launching-boot3]] +==== Launching a Task + +Launching a task is done by requesting the creation of a new task execution. The response will contain an execution id and a schema target. + +The following topics provide more details: + +* <> +* <> +* <> +* <> + + +[[api-guide-resources-task-executions-launching-boot3-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/launch-task-boot3/http-request.adoc[] + + + +[[api-guide-resources-task-executions-launching-boot3-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/launch-task-boot3/request-parameters.adoc[] + + + +[[api-guide-resources-task-executions-launching-boot3-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/launch-task-boot3/curl-request.adoc[] + + +[[api-guide-resources-task-executions-launching-boot3-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/launch-task-boot3/http-response.adoc[] + + [[api-guide-resources-task-executions-stopping]] ==== Stopping a Task @@ -1929,7 +2068,132 @@ include::{snippets}/task-executions-documentation/list-task-executions-by-name/c include::{snippets}/task-executions-documentation/list-task-executions-by-name/http-response.adoc[] +[[api-guide-resources-task-thin-executions-list]] +==== List All Task Thin Executions + +The task executions endpoint lets you list all task executions with only top-level data. +The following topics provide more details: + +* <> +* <> +* <> +* <> + +[[api-guide-resources-task-thin-executions-list-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions/http-request.adoc[] + +[[api-guide-resources-task-thin-executions-list-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/list-task-thin-executions/query-parameters.adoc[] + +[[api-guide-resources-task-thin-executions-list-example-request]] +===== Example Request +include::{snippets}/task-executions-documentation/list-task-thin-executions/curl-request.adoc[] + +[[api-guide-resources-task-thin-executions-list-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions/http-response.adoc[] + +[[api-guide-resources-task-thin-executions-list-by-name]] +==== List All Task Thin Executions With a Specified Task Name + +The task thin executions endpoint lets you list task executions with a specified task name. 
+The following topics provide more details: + +* <> +* <> +* <> +* <> + + + +[[api-guide-resources-task-thin-executions-list-by-name-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/http-request.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-by-name-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/query-parameters.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-by-name-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/curl-request.adoc[] + +[[api-guide-resources-task-thin-executions-list-by-name-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/http-response.adoc[] [[api-guide-resources-task-executions-detail]] ==== Task Execution Detail @@ -1947,9 +2211,9 @@ The following topics provide more details: [[api-guide-resources-task-executions-detail-request-structure]] ===== Request Structure -include::{snippets}/task-executions-documentation/launch-task-display-detail/http-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/http-request.adoc[] -include::{snippets}/task-executions-documentation/launch-task-display-detail/path-parameters.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/path-parameters.adoc[] @@ -1963,17 +2227,57 @@ There are no request parameters for this endpoint. [[api-guide-resources-task-executions-detail-example-request]] ===== Example Request -include::{snippets}/task-executions-documentation/launch-task-display-detail/curl-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/curl-request.adoc[] [[api-guide-resources-task-executions-detail-response-structure]] ===== Response Structure -include::{snippets}/task-executions-documentation/launch-task-display-detail/http-response.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/http-response.adoc[] + + +[[api-guide-resources-task-executions-detail-by-external-id]] +==== Task Execution Detail by External Id + +The task executions endpoint lets you get the details about a task execution. +The following topics provide more details: + +* <> +* <> +* <> +* <> + + + +[[api-guide-resources-task-executions-detail-by-external-id-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/http-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/path-parameters.adoc[] + +[[api-guide-resources-task-executions-detail-by-external-id-request-parameters]] +===== Request Parameters + +There are no request parameters for this endpoint. 
+ + + +[[api-guide-resources-task-executions-detail-by-external-id-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/curl-request.adoc[] + + + +[[api-guide-resources-task-executions-detail-by-external-id-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/http-response.adoc[] + [[api-guide-resources-task-executions-delete]] ==== Delete Task Execution @@ -2068,7 +2372,7 @@ The following topics provide more details: [[api-guide-resources-task-executions-current-count-request-structure]] ===== Request Structure -include::{snippets}/task-executions-documentation/launch-task-current-count/http-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-current-count/http-request.adoc[] [[api-guide-resources-task-executions-current-count-request-parameters]] ===== Request Parameters @@ -2078,12 +2382,12 @@ There are no request parameters for this endpoint. [[api-guide-resources-task-executions-current-count-example-request]] ===== Example Request -include::{snippets}/task-executions-documentation/launch-task-current-count/curl-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-current-count/curl-request.adoc[] [[api-guide-resources-task-executions-current-count-response-structure]] ===== Response Structure -include::{snippets}/task-executions-documentation/launch-task-current-count/http-response.adoc[] +include::{snippets}/task-executions-documentation/get-task-current-count/http-response.adoc[] [[api-guide-resources-job-executions]] @@ -2737,6 +3041,7 @@ include::{snippets}/job-step-executions-documentation/step-progress/curl-request include::{snippets}/job-step-executions-documentation/step-progress/http-response.adoc[] +NOTE: The following fields in the stepExecutionHistory are deprecated and will be removed in a future release: rollbackCount, readCount, writeCount, filterCount, readSkipCount, writeSkipCount, processSkipCount, durationPerRead. [[api-guide-resources-runtime-information-applications]] @@ -2962,3 +3267,34 @@ include::{snippets}/task-logs-documentation/get-logs-by-task-id/curl-request.ado ===== Response Structure include::{snippets}/task-logs-documentation/get-logs-by-task-id/http-response.adoc[] + +[[api-guide-openapi]] +== OpenAPI + +The https://springdoc.org/#Introduction[Springdoc] library is integrated with the server in an opt-in fashion. Once enabled, it provides OpenAPI3 documentation and a Swagger UI. + +To enable, set the following properties in your `application.yml` prior to launching the server: +```yaml +springdoc: + api-docs: + enabled: true + swagger-ui: + enabled: true +``` +The properties can also be set on the command line: +```shell +-Dspringdoc.api-docs.enabled=true -Dspringdoc.swagger-ui.enabled=true +``` +or as environment variables: +```shell +SPRINGDOC_APIDOCS_ENABLED=true +SPRINGDOC_SWAGGERUI_ENABLED=true +``` + +Once enabled, the OpenAPI3 docs and Swagger UI are available at the `/v3/api-docs` and `/swagger-ui/index.html` URIs, respectively (e.g. http://localhost:9393/v3/api-docs). + +TIP: The Swagger UI will initially be blank. Type "/v3/api-docs/" in the "Explore" bar and click "Explore". + +TIP: If you try out the APIs in the Swagger UI and get errors related to `"No property string found for type"`, try replacing the **pageable** parameter with `{ }` or removing its `"sort"` attribute. 
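A quick way to confirm the integration is active is to fetch the generated document directly (this assumes the server runs on the default port, 9393):

```shell
curl http://localhost:9393/v3/api-docs
```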
+ +There are a plethora of available https://springdoc.org/#properties[OpenAPI] and https://springdoc.org/#swagger-ui-properties[Swagger UI] properties to configure the feature. \ No newline at end of file diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc deleted file mode 100644 index 25628378e6..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc +++ /dev/null @@ -1,81 +0,0 @@ -[appendix] -[[building]] -== Building - -This appendix describes how to build Spring Cloud Data Flow. - -To build the source, you need to install JDK 1.8. - -The build uses the Maven wrapper so that you do not have to install a specific version of Maven. - -The main build command is as follows: - -==== -[source,bash] ----- -$ ./mvnw clean install ----- -==== - -To speed up the build, you can add `-DskipTests` to avoid running the tests. - -NOTE: You can also install Maven (>=3.3.3) yourself and run the `mvn` command in place of `./mvnw` in the examples below. -If you do that, you also might need to add `-P spring` if your local Maven settings do not contain repository declarations for Spring pre-release artifacts. - -NOTE: You might need to increase the amount of memory available to Maven by setting a `MAVEN_OPTS` environment variable with a value similar to `-Xmx512m -XX:MaxPermSize=128m`. -We try to cover this in the `.mvn` configuration, so, if you find you have to do it to make a build succeed, please raise a ticket to get the settings added to source control. - -=== Documentation - -There is a `full` profile that generates documentation. You can build only the documentation by using the following command: - -==== -[source,bash] ----- -$ ./mvnw clean package -DskipTests -P full -pl {project-artifactId} -am ----- -==== - -=== Working with the Code - -If you do not have a favorite IDE, we recommend that you use https://spring.io/tools[Spring Tools Suite] or https://www.eclipse.org[Eclipse] when working with the code. -We use the https://www.eclipse.org/m2e/[m2eclipse] Eclipse plugin for Maven support. -Other IDEs and tools generally also work without issue. - - - -==== Importing into Eclipse with m2eclipse - -We recommend the https://www.eclipse.org/m2e/[m2eclipe] eclipse plugin when working with Eclipse. -If you do not already have m2eclipse installed, it is available from the Eclipse marketplace. - -Unfortunately, m2e does not yet support Maven 3.3. -Consequently, once the projects are imported into Eclipse, you also need to tell m2eclipse to use the `.settings.xml` file for the projects. -If you do not do this, you may see many different errors related to the POMs in the projects. -To do so: - -. Open your Eclipse preferences. -. Expand the *Maven preferences*. -. Select *User Settings*. -. In the *User Settings* field, click *Browse* and navigate to the Spring Cloud project you imported. -. Select the `.settings.xml` file in that project. -. Click *Apply*. -. Click *OK*. - -NOTE: Alternatively, you can copy the repository settings from Spring Cloud's https://github.com/spring-cloud/spring-cloud-build/blob/master/.settings.xml[`.settings.xml`] file into your own `~/.m2/settings.xml`. 
- - - -==== Importing into Eclipse without m2eclipse - -If you prefer not to use m2eclipse, you can generate Eclipse project metadata by using the following command: - -==== -[source,bash] ----- -$ ./mvnw eclipse:eclipse ----- -==== - -You can import the generated Eclipse projects by selecting *Import existing projects* -from the *File* menu. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc deleted file mode 100644 index a5fda2cdc7..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc +++ /dev/null @@ -1,32 +0,0 @@ -[appendix] -[[contributing]] -== Contributing - -Spring Cloud is released under the non-restrictive Apache 2.0 license and follows a very standard Github development process, using Github tracker for issues and merging pull requests into the master branch. -If you want to contribute even something trivial, please do not hesitate, but do please follow the guidelines in this appendix. - - - -=== Sign the Contributor License Agreement - -Before we accept a non-trivial (anything more than correcting a typographical error) patch or pull request, we need you to sign the https://cla.pivotal.io[contributor's agreement]. -Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you get an author credit if we do. -Active contributors might be asked to join the core team and be given the ability to merge pull requests. - - - -=== Code Conventions and Housekeeping - -None of the following guidelines is essential for a pull request, but they all help your fellow developers understand and work with your code. -They can also be added after the original pull request but before a merge. - -* Use the Spring Framework code format conventions. If you use Eclipse, you can import formatter settings by using the `eclipse-code-formatter.xml` file from the https://github.com/spring-cloud/spring-cloud-build/blob/master/spring-cloud-dependencies-parent/eclipse-code-formatter.xml[Spring Cloud Build] project. -If you use IntelliJ, you can use the https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter Plugin] to import the same file. -* Make sure all new `.java` files have a simple Javadoc class comment with at least an `@author` tag identifying you, and preferably at least a paragraph describing the class's purpose. -* Add the ASF license header comment to all new `.java` files (to do so, copy it from existing files in the project). -* Add yourself as an `@author` to the .java files that you modify substantially (more than cosmetic changes). -* Add some Javadocs and, if you change the namespace, some XSD doc elements. -* A few unit tests would help a lot as well. Someone has to do it, and your fellow developers appreciate the effort. -* If no one else uses your branch, rebase it against the current master (or other target branch in the main project). -* When writing a commit message, follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions]. -If you fix an existing issue, add `Fixes gh-XXXX` (where XXXX is the issue number) at the end of the commit message. 
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-create-containers.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-create-containers.adoc new file mode 100644 index 0000000000..6b4803344e --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-create-containers.adoc @@ -0,0 +1,120 @@ +[[create-containers]] + +=== Create containers for architectures not yet supported +In the case of macOS on M1, the performance of amd64/x86_64 images is unacceptable. +We provide a set of scripts that can be used to download specific versions of published artifacts. +We also provide a script that creates a container for the host platform from the downloaded artifact. +In the various projects you will find them in the `src/local` or `local` folders. + +[cols="1,4,6"] +|=== +|Project | Scripts | Notes + +| [.small]#Data Flow# +a| +[.small]#`src/local/download-apps.sh`# + +[.small]#`src/local/create-containers.sh`# + +a|[.small]#Download or create container for: `spring-cloud-dataflow-server`,# + +[.small]#`spring-cloud-dataflow-composed-task-runner`,# + +[.small]#`spring-cloud-dataflow-single-step-batch-job`,# + +[.small]#`spring-cloud-dataflow-tasklauncher-sink-kafka`,# + +[.small]#`spring-cloud-dataflow-tasklauncher-sink-rabbit`# + + +|[.small]#Skipper# +a| +[.small]#`local/download-app.sh`# + +[.small]#`local/create-container.sh`# +|[.small]#Download or create container for: `spring-cloud-skipper-server`# + +|[.small]#Stream Applications# +a| +[.small]#`local/download-apps.sh`# + +[.small]#`local/create-containers.sh`# + +[.small]#`local/pack-containers.sh`# +| `create-containers.sh` uses `jib` + +`pack-containers.sh` uses `pack` +|=== + +==== Scripts in `spring-cloud-dataflow` +===== `src/local/download-apps.sh` +Downloads all applications needed by `create-containers.sh` from the Maven repository. + +*If the timestamp of the snapshot matches, the download is skipped.* + +Usage: `download-apps.sh [version]` + +* `version` is the dataflow-server version, such as `2.11.3`. Default is `2.11.3-SNAPSHOT` + +===== `src/local/create-containers.sh` +Creates all containers and pushes them to the local Docker registry. + +This script requires link:https://github.com/GoogleContainerTools/jib/tree/master/jib-cli[jib-cli]. + +Usage: `create-containers.sh [version] [jre-version]` + +* `version` is the dataflow-server version, such as `2.11.3`. Default is `2.11.3-SNAPSHOT` +* `jre-version` should be one of 11, 17. Default is 11 + +==== Scripts in `spring-cloud-skipper` + +===== `local/download-app.sh` +Downloads all applications needed by `create-container.sh` from the Maven repository. + +*If the timestamp of the snapshot matches, the download is skipped.* + +Usage: `download-app.sh [version]` + +* `version` is the Skipper version, such as `2.11.3`. Default is `2.11.3-SNAPSHOT` + +===== `local/create-container.sh` +Creates all containers and pushes them to the local Docker registry. +This script requires link:https://github.com/GoogleContainerTools/jib/tree/master/jib-cli[jib-cli]. + +Usage: `create-container.sh [version] [jre-version]` + +* `version` is the Skipper version, such as `2.11.3`. Default is `2.11.3-SNAPSHOT` +* `jre-version` should be one of 11, 17
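As a concrete illustration of the Data Flow and Skipper scripts above, a local build of JDK 17 images might look like the following sketch (the version numbers are only examples):

[source,shell]
----
# from the spring-cloud-dataflow checkout
./src/local/download-apps.sh 2.11.3
./src/local/create-containers.sh 2.11.3 17

# from the spring-cloud-skipper checkout
./local/download-app.sh 2.11.3
./local/create-container.sh 2.11.3 17
----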
+==== Scripts in `stream-applications` + +===== `local/download-apps.sh` + +Downloads all applications needed by `create-containers.sh` from the Maven repository. + +*If the timestamp of the snapshot matches, the download is skipped.* + +Usage: `download-apps.sh [version] [broker] [filter]` + +* `version` is the stream applications version, such as `3.2.1`. Default is `3.2.2-SNAPSHOT` +* `broker` is one of rabbitmq, rabbit, or kafka +* `filter` is the name of an application, or a partial name that will be matched. + +===== `local/create-containers.sh` +Creates all containers and pushes them to the local Docker registry. + +This script requires link:https://github.com/GoogleContainerTools/jib/tree/master/jib-cli[jib-cli]. + +Usage: `create-containers.sh [version] [broker] [jre-version] [filter]` + +* `version` is the stream-applications version, such as `3.2.1`. Default is `3.2.2-SNAPSHOT` +* `broker` is one of rabbitmq, rabbit, or kafka +* `jre-version` should be one of 11, 17 +* `filter` is the name of an application, or a partial name that will be matched. + +If a file required to create a container is not present, the script skips that container. + +===== `local/pack-containers.sh` +Creates all containers and pushes them to the local Docker registry. + +This script requires link:https://buildpacks.io/docs/tools/pack[Paketo `pack`]. + +Usage: `pack-containers.sh [version] [broker] [jre-version] [filter]` + +* `version` is the stream-applications version, such as `3.2.1`. Default is `3.2.2-SNAPSHOT` +* `broker` is one of rabbitmq, rabbit, or kafka +* `jre-version` should be one of 11, 17 +* `filter` is the name of an application, or a partial name that will be matched. + +If a file required to create a container is not present, the script skips that container. + +NOTE: If any parameter is provided, all parameters to the left of it are required. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-dataflow-template.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-dataflow-template.adoc index f37dc76c99..bb99fc27b9 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-dataflow-template.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-dataflow-template.adoc @@ -180,6 +180,3 @@ To configure _Basic Authentication_, the following setup is required: ---- ==== -You can find a sample application as part of the -https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/master/dataflow-template-example[spring-cloud-dataflow-samples] repository -on GitHub. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-development-tasks.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-development-tasks.adoc new file mode 100644 index 0000000000..3e80ef1ddc --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-development-tasks.adoc @@ -0,0 +1,7 @@ +[appendix] +[[development-tasks]] +== Development Tasks + +include::appendix-extend-classpath.adoc[] +include::appendix-create-containers.adoc[] +include::appendix-local-k8s-development.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-extend-classpath.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-extend-classpath.adoc new file mode 100644 index 0000000000..5dbe44a8ff --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-extend-classpath.adoc @@ -0,0 +1,47 @@ +[[extend-classpath]] + +=== Extending application classpath + +Users may need to add dependencies to the existing Stream applications, or specific database drivers to Dataflow and Skipper, or to any of the other containers provided by the project. 
+ +NOTE: The Spring Cloud Dataflow repository contains scripts to help with this task. The examples below assume you have cloned the `spring-cloud-dataflow` repository and are executing the scripts from `src/add-deps`. + +==== JAR File + +_We suggest you publish the updated jar to a private Maven repository, and that the Maven coordinates from that private repository are then used to register the application with SCDF._ + +===== Example + +This example: +* assumes the jar is downloaded to `${appFolder}/${appName}-${appVersion}.jar` +* adds the dependencies and then publishes the jar to Maven local. + +[source,shell] +.... +./gradlew -i publishToMavenLocal \ + -P appFolder="." \ + -P appGroup="org.springframework.cloud" \ + -P appName="spring-cloud-dataflow-server" \ + -P appVersion="2.11.3" \ + -P depFolder="./extra-libs" +.... + +NOTE: Use the `publishMavenPublicationToMavenRepository` task to publish to a remote repository. Update `gradle.properties` with the remote repository details. Alternatively, move `repoUser` and `repoPassword` to `~/.gradle/gradle.properties`. + +==== Containers + +To create a container, we suggest using the https://buildpacks.io/docs/for-platform-operators/how-to/integrate-ci/pack[Paketo `pack` CLI] to build a container from the jar created in the previous step. + +[source, shell] +.... +REPO=springcloud/spring-cloud-dataflow-server +TAG=2.11.3 +JAR=build/spring-cloud-dataflow-server-${TAG}.jar +JAVA_VERSION=8 +pack build --builder gcr.io/paketo-buildpacks/builder:base \ + --path "$JAR" \ + --trust-builder --verbose \ + --env BP_JVM_VERSION=${JAVA_VERSION} "$REPO:$TAG-jdk${JAVA_VERSION}-extra" +.... + +NOTE: Publish the container to a private container registry and register the application's Docker URI with SCDF. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc index 41e131b6fd..d1e6a6b337 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc @@ -9,28 +9,37 @@ That is also a great place to ask new questions (use the `spring-cloud-dataflow` We are also more than happy to extend this section. If you want to add a "`how-to`", you can send us a {github-code}[pull request]. - - === Configure Maven Properties +If applications are resolved by using the Maven repository, you may want to configure the underlying resolver. You can set the Maven properties, such as the local Maven repository location, remote Maven repositories, authentication credentials, and proxy server properties through command-line properties when you start the Data Flow server. Alternatively, you can set the properties by setting the `SPRING_APPLICATION_JSON` environment property for the Data Flow server. -The remote Maven repositories need to be configured explicitly if the applications are resolved by using the Maven repository, except for a `local` Data Flow server. -The other Data Flow server implementations (which use Maven resources for application artifacts resolution) have no default value for remote repositories. -The `local` server has `https://repo.spring.io/libs-snapshot` as the default remote repository. 
+For all Data Flow server installations, the following remote Maven repositories are configured by default: + +* Maven Central (`https://repo.maven.apache.org/maven2`) +* Spring Snapshots (`https://repo.spring.io/snapshot`) +* Spring Milestones (`https://repo.spring.io/milestone`) + +If a default is already explicitly configured (an exact match on the repository URL), it is not included again. + +If the applications exist on a remote repository other than the pre-configured ones, that remote repository must be configured explicitly, and it is added to the pre-configured default list. + +TIP: To skip the automatic default repositories behavior altogether, set the `maven.include-default-remote-repos` property to `false`. To pass the properties as command-line options, run the server with a command similar to the following: ==== -[source,bash] +[source,shell] ---- -$ java -jar .jar --maven.localRepository=mylocal ---maven.remote-repositories.repo1.url=https://repo1 ---maven.remote-repositories.repo1.auth.username=repo1user ---maven.remote-repositories.repo1.auth.password=repo1pass ---maven.remote-repositories.repo2.url=https://repo2 --maven.proxy.host=proxyhost ---maven.proxy.port=9018 --maven.proxy.auth.username=proxyuser +java -jar .jar --maven.localRepository=mylocal \ +--maven.remote-repositories.repo1.url=https://repo1 \ +--maven.remote-repositories.repo1.auth.username=repo1user \ +--maven.remote-repositories.repo1.auth.password=repo1pass \ +--maven.remote-repositories.repo2.url=https://repo2 \ +--maven.proxy.host=proxyhost \ +--maven.proxy.port=9018 \ +--maven.proxy.auth.username=proxyuser \ --maven.proxy.auth.password=proxypass ---- ==== @@ -78,16 +87,34 @@ SPRING_APPLICATION_JSON='{ ---- ==== -NOTE: Depending on the Spring Cloud Data Flow server implementation, you may have to pass the environment properties by using the platform specific environment-setting capabilities. For instance, in Cloud Foundry, you would pass them as `cf set-env SPRING_APPLICATION_JSON`. +You can also set the properties as individual environment variables: +==== +[source,bash] +---- +export MAVEN_REMOTEREPOSITORIES_REPO1_URL=https://repo1 +export MAVEN_REMOTEREPOSITORIES_REPO1_AUTH_USERNAME=repo1user +export MAVEN_REMOTEREPOSITORIES_REPO1_AUTH_PASSWORD=repo1pass +export MAVEN_REMOTEREPOSITORIES_REPO2_URL=https://repo2 +export MAVEN_PROXY_HOST=proxyhost +export MAVEN_PROXY_PORT=9018 +export MAVEN_PROXY_AUTH_USERNAME=proxyuser +export MAVEN_PROXY_AUTH_PASSWORD=proxypass +---- ==== +A `SPRING_APPLICATION_JSON` equivalent of these settings is sketched at the end of this how-to. === Troubleshooting This section covers how to troubleshoot Spring Cloud Data Flow on your platform of choice. See the Troubleshooting sections of the microsite for link:https://dataflow.spring.io/docs/stream-developer-guides/troubleshooting/[Stream] and link:https://dataflow.spring.io/docs/batch-developer-guides/troubleshooting/[Batch] processing. +include::appendix-extend-classpath.adoc[] +include::appendix-create-containers.adoc[] +include::appendix-local-k8s-development.adoc[] [[faqs]] === Frequently Asked Questions In this section, we review the frequently asked questions for Spring Cloud Data Flow. See the https://dataflow.spring.io/docs/resources/faq/[Frequently Asked Questions] section of the microsite for more information. + +// TODO move the FAQ to reference guide. 
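For completeness, here is a sketch of the `SPRING_APPLICATION_JSON` form of the same Maven settings shown in the how-to above. The repository names (`repo1`, `repo2`), credentials, and proxy values are illustrative placeholders:

[source,shell]
----
export SPRING_APPLICATION_JSON='{
  "maven": {
    "local-repository": "mylocal",
    "remote-repositories": {
      "repo1": {
        "url": "https://repo1",
        "auth": { "username": "repo1user", "password": "repo1pass" }
      },
      "repo2": { "url": "https://repo2" }
    },
    "proxy": {
      "host": "proxyhost",
      "port": 9018,
      "auth": { "username": "proxyuser", "password": "proxypass" }
    }
  }
}'
----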
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc index 551f74fee2..8ea1d8c26b 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc @@ -247,6 +247,8 @@ $ java -jar spring-cloud-dataflow-shell.jar \ ---- ==== +NOTE: A Public Client requires *App Roles* whose values match the internal permissions `[dataflow.create, dataflow.deploy, dataflow.destroy, dataflow.manage, dataflow.modify, dataflow.schedule, dataflow.view]`, to ensure they are added to the access token. + Starting a public shell and (optionally) pass credentials as options: ==== diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-local-k8s-development.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-local-k8s-development.adoc new file mode 100644 index 0000000000..6f83aa4f84 --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-local-k8s-development.adoc @@ -0,0 +1,246 @@ +[[local-k8s-development]] + +=== Configure Kubernetes for local development or testing + +==== Prerequisites + +You will need to install kubectl, and then kind or minikube for a local cluster. + +All the examples assume you have cloned the `spring-cloud-dataflow` repository and are executing the scripts from `deploy/k8s`. + +On macOS, you may need to install `realpath` from link:https://ports.macports.org/port/realpath/[Macports] or with `brew install realpath`. + +NOTE: The scripts require a shell like `bash` or `zsh` and should work on Linux, WSL 2, or macOS. + +==== Steps +* Choose a Kubernetes provider: kind, Minikube, or remote GKE or TMC. +* Decide the namespace to use for deployment, if not `default`. +* Configure Kubernetes and the load balancer. +* Choose a broker with `export BROKER=kafka|rabbitmq`. +* Build or pull container images for Skipper and the Data Flow Server. +* Deploy and launch Spring Cloud Data Flow. +* Export the Data Flow Server address to the environment. + +===== Kubernetes Provider + +_How do I choose between minikube and kind? kind will generally provide quicker setup and teardown time than Minikube. There is little to choose between the two in terms of performance, apart from being able to configure limits on CPUs and memory when deploying minikube. So, where you have memory constraints or need to enforce memory limits, Minikube is the better option._ + +===== Kubectl + +You will need to link:https://kubernetes.io/docs/tasks/tools/[install] kubectl in order to configure the Kubernetes cluster. + +===== Kind + +Kind is Kubernetes in Docker and is ideal for local development. + +* link:https://kind.sigs.k8s.io/docs/user/quick-start/[Installation] +* link:https://kind.sigs.k8s.io/docs/user/loadbalancer/[LoadBalancer] + +The LoadBalancer will be installed by the `configure-k8s.sh` script but will require an update to a YAML file to provide the address range available to the LoadBalancer. + +===== Minikube + +Minikube uses one of a selection of drivers to provide a virtualization environment. + +* link:https://minikube.sigs.k8s.io/docs/start/[Installation] +* link:https://minikube.sigs.k8s.io/docs/start/#loadbalancer-deployments[LoadBalancer] + +NOTE: Delete any existing Minikube installation first: `minikube delete`
+ +===== Remote TMC Cluster + +link:https://tanzu.vmware.com/mission-control[Tanzu Mission Control] + +==== Building and loading containers + +For local development, you need control of the containers used in the local environment. + +To manage specific versions of the Data Flow and Skipper containers, you can set the `SKIPPER_VERSION` and `DATAFLOW_VERSION` environment variables and then invoke `./images/pull-dataflow.sh` and `./images/pull-skipper.sh`. If you want to use a locally built application, you can invoke `./images/build-skipper-image.sh` and `./images/build-dataflow.sh` instead. + +==== Configure k8s environment + +You can invoke one of the following scripts to choose the type of installation you are targeting: + +[source,shell] +---- +./k8s/use-kind.sh [<database>] [<broker>] [--namespace <namespace>] +./k8s/use-mk-docker.sh [<database>] [<broker>] [--namespace <namespace>] +./k8s/use-mk-kvm2.sh [<database>] [<broker>] [--namespace <namespace>] +./k8s/use-mk.sh <driver> [<database>] [<broker>] [--namespace <namespace>] # <1> +./k8s/use-tmc.sh [<database>] [<broker>] [--namespace <namespace>] +./k8s/use-gke.sh [<database>] [<broker>] [--namespace <namespace>] +---- +<1> `<driver>` must be one of `kvm2`, `docker`, `vmware`, `virtualbox`, `vmwarefusion` or `hyperkit`. `docker` is the recommended option for local development. + +NOTE: `<namespace>` will be `default` if not provided. The default `<database>` is `postgresql` and the default `<broker>` is `kafka`. + +Since these scripts export environment variables, they need to be executed as in the following example: + +[source,shell] +.... +source ./k8s/use-mk-docker.sh postgresql rabbitmq --namespace test-ns +.... + +===== TMC or GKE Cluster in Cloud + +The cluster must exist before use, and you should use the relevant CLI to log in before executing `source ./k8s/use-gke.sh`. + +===== Create Local Cluster + +The following script will create the local cluster. + +[source,shell] +.... +# Optionally add to control CPU and memory allocation. +export MK_ARGS="--cpus=8 --memory=12g" +./k8s/configure-k8s.sh +.... + +* For *kind*, follow the instructions to update `./k8s/yaml/metallb-configmap.yaml` and then apply it using `kubectl apply -f ./k8s/yaml/metallb-configmap.yaml` + +* For *minikube*, launch a new shell and execute `minikube tunnel` + +===== Deploy Spring Cloud Data Flow + +The `use-*` scripts will configure the values of `BROKER` and `DATABASE`. + +====== Configure Broker +[source,shell] +.... +export BROKER= # <1> +.... +<1> one of `kafka` or `rabbitmq` + +====== Configure Database + +[source,shell] +.... +export DATABASE= # <1> +.... +<1> one of `mariadb` or `postgresql` + +Docker credentials need to be configured for Kubernetes to pull the various container images. + +For Docker Hub, you can create a personal free account and use a personal access token as your password. + +Test your Docker login using `./k8s/docker-login.sh`. + +[source,shell] +.... +export DOCKER_SERVER=https://docker.io +export DOCKER_USER= +export DOCKER_PASSWORD= +export DOCKER_EMAIL= +.... + +Set the version of Spring Cloud Data Flow and Skipper. + +This example shows the versions of the current development snapshot. + +[source,shell] +.... +export DATAFLOW_VERSION=2.11.5-SNAPSHOT +export SKIPPER_VERSION=2.11.5-SNAPSHOT +.... + +Before you can install SCDF, you will need to pull the following images to ensure they are present for uploading to the k8s cluster. + +You can configure the following before running `pull-app-images` and `install-scdf`: + +* `STREAM_APPS_RT_VERSION` Stream Apps Release Train Version. _Default is 2022.0.0_. +* `STREAM_APPS_VERSION` Stream Apps Version. _Default is 4.0.0_. + +Use: + +[source,shell] +.... +./images/pull-app-images.sh +./images/pull-dataflow.sh +./images/pull-skipper.sh +./images/pull-composed-task-runner.sh +.... 
+ +Then install Spring Cloud Data Flow and export the server address: + +[source,shell] +.... +./k8s/install-scdf.sh +source ./k8s/export-dataflow-ip.sh +.... + +NOTE: You can now execute scripts from `./shell` to deploy some simple streams and tasks. You can also run `./shell/shell.sh` to run the Spring Cloud Data Flow Shell. + + +If you want to start fresh, use the following to delete the SCDF deployment; then run `./k8s/install-scdf.sh` to install it again. + + +===== Delete the deployment from the cluster + +[source,shell] +.... +./k8s/delete-scdf.sh +.... + +===== Delete the cluster + +This script will also delete the TMC cluster if you have configured one. + +[source,shell] +.... +./k8s/destroy-k8s.sh +.... + +==== Utilities +The following list of utilities may prove useful. + +[cols="2m,8"] +|=== +|Name | Description + +| link:https://k9scli.io/[k9s] | k9s is a text-based monitor to explore the Kubernetes cluster. +| link:https://github.com/boz/kail[kail] | Extract and tail the logs of various pods based on various naming criteria. +|=== + +===== `kail` + + +* Using kail to log activity related to a specific stream. + +[source,shell] +---- +kail --label=spring-group-id=<stream-name> +---- +* Using kail to log all pods in a specific namespace. + +[source,shell] +---- +kail --ns=<namespace> +---- + +==== Scripts + +Some of the scripts apply to local containers as well and can be found in `src/local`; the Kubernetes-specific scripts are in `deploy/k8s`. + +[cols="5m,10"] +|=== +|Script |Description + +| ./images/build-app-images.sh | Build all images of Restaurant Sample Stream Apps +| ./images/pull-app-images.sh | Pull all images of Restaurant Sample Stream Apps from Docker Hub +| ./images/pull-dataflow.sh | Pull dataflow from DockerHub based on `DATAFLOW_VERSION`. +| ./images/pull-scdf-pro.sh | Pull Dataflow Pro from Tanzu Network based on `SCDF_PRO_VERSION`. +| ./images/pull-skipper.sh | Pull Skipper from DockerHub based on the `SKIPPER_VERSION`. +| ./images/build-dataflow-image.sh | Build a docker image from the local repo of Dataflow +| ./images/build-scdf-pro-image.sh | Build a docker image from the local repo of Dataflow Pro. Set `USE_PRO=true` in environment to use Dataflow Pro +| ./images/build-skipper-image.sh | Build a docker image from the local repo of Skipper. +| ./k8s/configure-k8s.sh | Configure the Kubernetes environment based on your configuration of K8S_DRIVER. +| ./k8s/delete-scdf.sh | Delete all Kubernetes resources created by the deployment. +| ./k8s/destroy-k8s.sh | Delete cluster, kind or minikube. +| ./k8s/export-dataflow-ip.sh | Export the URL of the Data Flow server to `DATAFLOW_IP` +| ./k8s/export-http-url.sh | Export the URL of the http source of a specific flow by name to `HTTP_APP_URL` +| ./k8s/install-scdf.sh | Configure and deploy all the containers for Spring Cloud Dataflow +| ./k8s/load-images.sh | Load all container images required by tests into kind or minikube to ensure you have control over what is used. +| ./k8s/load-image.sh | Load a specific container image into local kind or minikube. +| src/local/local-k8s-acceptance-tests.sh | Execute acceptance tests against the cluster where `DATAFLOW_IP` is pointing. +| ./k8s/register-apps.sh | Register the Task and Stream apps used by the unit tests. +|=== + +IMPORTANT: Please report any errors with the scripts along with detailed information about the relevant environment. 
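Putting the pieces together, a minimal end-to-end run on a kind cluster might look like the following sketch. The versions and namespace are illustrative; the defaults described above apply when arguments are omitted:

[source,shell]
----
source ./k8s/use-kind.sh postgresql kafka --namespace test-ns
./k8s/configure-k8s.sh
# update ./k8s/yaml/metallb-configmap.yaml with your address range, then:
kubectl apply -f ./k8s/yaml/metallb-configmap.yaml

export DATAFLOW_VERSION=2.11.5-SNAPSHOT
export SKIPPER_VERSION=2.11.5-SNAPSHOT
./images/pull-dataflow.sh
./images/pull-skipper.sh

./k8s/install-scdf.sh
source ./k8s/export-dataflow-ip.sh
echo "$DATAFLOW_IP"
----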
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc index 9104731f46..8b4a2a16c4 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc @@ -12,6 +12,6 @@ Having trouble with Spring Cloud Data Flow, We'd like to help! include::appendix-dataflow-template.adoc[] include::appendix-howto.adoc[] -include::appendix-building.adoc[] -include::appendix-contributing.adoc[] include::appendix-identity-providers.adoc[] +include::spring-boot-3x.adoc[] +// include::appendix-development-tasks.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc index cd56940c6d..2d7028a8d6 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc @@ -3,126 +3,5 @@ [partintro] -- -A selection of pre-built link:https://cloud.spring.io/spring-cloud-stream-app-starters/[stream] and link:https://cloud.spring.io/spring-cloud-task-app-starters/[task or batch] starter applications for various data integration and processing scenarios to facilitate learning and experimentation. The table in the next section includes the pre-built applications at a glance. For more details, review how to <>. +A selection of pre-built applications for various data integration and processing scenarios to facilitate learning and experimentation can be found link:https://docs.spring.io/stream-applications/docs/current/reference/html/index.html#applications/[here]. -- - -== Available Applications -[width="100%",frame="topbot",options="header",subs=attributes] -|====================== -|Source |Processor |Sink |Task - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-sftp-source[sftp] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-client-processor[tcp-client] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mqtt-sink[mqtt] -|link:https://docs.spring.io/spring-cloud-task-app-starters/docs/current/reference/htmlsingle/#_timestamp_task[timestamp] - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-jms-source[jms] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-scriptable-transform[scriptable-transform] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-log-sink[log] -|link:https://docs.spring.io/spring-cloud-task-app-starters/docs/current/reference/htmlsingle/#_composed_task_runner[composed-task-runner] - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-ftp-source[ftp] -|link:{scs-app-starters-docs-htmlsingle}/#spring-clound-stream-modules-transform-processor[transform] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-throughput-sink[throughput] -|link:https://docs.spring.io/spring-cloud-task-app-starters/docs/current/reference/htmlsingle/#_timestamp_batch_task[timestamp-batch] - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-time-source[time] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-header-enricher-processor[header-enricher] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mongodb-sink[mongodb] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-load-generator-source[load-generator] 
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-python-http-processor[python-http] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-ftp-sink[ftp] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-syslog-source[syslog] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-twitter-sentiment-processor[twitter-sentiment] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-jdbc-sink[jdbc] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-aws-s3-source[s3] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-splitter[splitter] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-cassandra-sink[cassandra] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-loggregator-source[loggregator] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-image-recognition-processor[image-recognition] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-router-sink[router] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-trigger-source[triggertask (deprecated)] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-bridge-processor[bridge] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-redis-sink[redis-pubsub] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-twitterstream-source[twitterstream] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-pmml-processor[pmml] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-file-sink[file] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mongodb-source[mongodb] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-python-jython-processor[python-jython] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-websocket-sink[websocket] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-gemfire-cq-source[gemfire-cq] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-groovy-transform-processor[groovy-transform] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-aws-s3-sink[s3] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-http-source[http] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-httpclient-processor[httpclient] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-rabbit-sink[rabbit] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-rabbit-source[rabbit] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-filter-processor[filter] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-counter-sink[counter] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-source[tcp] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-pose-estimation-processor[pose-estimation] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-pgcopy-sink[pgcopy] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-trigger-source[trigger] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-grpc-processor[grpc] -|link:https://github.com/spring-cloud-stream-app-starters/gpfdist[gpfdist] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mqtt-source[mqtt] 
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-groovy-filter-processor[groovy-filter]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-sftp-sink[sftp]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-client-source[tcp-client]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-aggregator-processor[aggregator]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-task-launcher-dataflow-sink[task-launcher-dataflow]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mail-source[mail]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-counter-processor[counter]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-hdfs-sink[hdfs]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-jdbc-source[jdbc]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tensorflow-processor[tensorflow]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-sink[tcp]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-gemfire-source[gemfire]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tasklaunchrequest-transform[tasklaunchrequest-transform (deprecated)]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-gemfire-sink[gemfire]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-file-source[file]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-object-detection-processor[object-detection]
-|
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-sftp-dataflow-source[sftp-dataflow]
-|
-|
-|
-|======================
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc
new file mode 100644
index 0000000000..67d50ab697
--- /dev/null
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc
@@ -0,0 +1,266 @@
+[[configuration-carvel]]
+== Deployment using Carvel
+
+Deploying a Carvel package requires the installation of tools and specific Kubernetes controllers. You then add the package repository to the cluster and install the application.
+
+For a local minikube or kind cluster, you can use xref:local-k8s-development[Configure Kubernetes for local development or testing] and follow the instructions up to the section _Deploy Spring Cloud Data Flow_.
+
+=== Required Tools
+
+* `kubectl` - Kubernetes CLI (install with `brew install kubectl`)
+* `carvel` - Packaging and deployment tools
+
+The Carvel CLI can be installed using:
+
+[source,shell]
+....
+wget -O- https://carvel.dev/install.sh | bash
+# or with curl...
+curl -L https://carvel.dev/install.sh | bash
+....
+
+Alternatively, follow the instructions at the bottom of the home page at link:https://carvel.dev/[carvel.dev].
+
+The following tools are used by the scripts:
+
+* `jq` - Lightweight JSON parser.
+* `yq` - Lightweight YAML parser.
+* `wget` - Invokes HTTP requests.
+* `dirname` - Provides the directory part of a filename.
+* `readlink` - Provides the absolute path of a relative link.
+
+NOTE: Some of these utilities are not installed on macOS or *nix by default but are available from MacPorts or Homebrew.
+
+=== Scripts
+
+These scripts assume you are connected to a Kubernetes cluster and `kubectl` is available.
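+
+Before running the scripts, you can verify that the cluster connection and the required tools are in place. The following is a minimal sanity check, assuming the Carvel CLIs and the utilities listed above are on your `PATH`:
+
+[source,shell]
+....
+# confirm the cluster connection used by kubectl
+kubectl config current-context
+kubectl get nodes
+
+# confirm the Carvel CLIs installed by install.sh
+kapp version
+ytt version
+imgpkg version
+
+# confirm the helper utilities used by the scripts
+jq --version
+yq --version
+....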
+
+[cols="3m,3,6a"]
+|===
+|Name | Arguments |Description
+
+| start-deploy.sh
+| <broker> [scdf-type] [namespace] [release\|snapshot]
+| Configures the environment variables needed by the rest of the scripts. `BROKER`, `NS` and `SCDF_TYPE` are set. The default `NS` is `scdf`. The namespace is created by `setup-scdf-repo.sh` if it doesn't exist. The default `SCDF_TYPE` is `oss`. _release\|snapshot_ and _scdf-type_ determine the value of `PACKAGE_VERSION`.
+
+| prepare-cluster.sh
+| N/A
+| Installs cert-manager, secretgen-controller and kapp-controller.
+
+| carvel-use-template.sh
+| [scdf-type] (oss, pro)
+| Creates `scdf-values.yml` in the current directory based on `scdf-pro-values.yml` or `scdf-oss-values.yml`.
+
+| carvel-import-secret.sh
+| [secret-namespace] [--import\|--placeholder]
+| Creates an import secret, placeholder or import, using secretgen-controller.
+
+| setup-scdf-repo.sh
+| [scdf-type] (oss, pro)
+| Creates the namespace and installs the relevant Carvel package and credentials. If the optional _scdf-type_ is not provided, the environment variable `SCDF_TYPE` is used.
+
+| configure-prometheus-proxy.sh
+| [step]
+| Configures Spring Boot Actuator properties for Data Flow, Skipper, Streams and Tasks. The default `step` is 10s.
+
+| configure-database.sh
+| <url> <username/secret-name> [password/secret-username-key] [secret-password-key]
+| If only _secret-name_ is provided, then _secret-username-key_ defaults to `username` and _secret-password-key_ defaults to `password`.
+
+The following 3 combinations are allowed after the _url_:
+
+* <username> <password>
+* <secret-name>
+* <secret-name> <secret-username-key> <secret-password-key>
+
+| deploy-scdf.sh
+| [app-name]
+| Deploys the application using the package and `scdf-values.yml` in the current directory.
+The default _app-name_ is `scdf-${SCDF_TYPE}`.
+
+| update-scdf.sh
+| [app-name]
+| Updates the deployed application using a modified values file.
+The default _app-name_ is `scdf-${SCDF_TYPE}`.
+
+| export-dataflow-ip.sh
+| N/A
+| Prints the URL used to access Data Flow. If you use `source ./export-dataflow-ip.sh`, it exports `DATAFLOW_URL` for use by `register-apps.sh`.
+
+| register-apps.sh
+| <broker> [stream-application-version]
+| _broker_ must be one of rabbit or kafka.
+_stream-application-version_ is optional; when omitted, the latest version is installed. The latest version is 2021.1.2.
+
+|===
+
+NOTE: Registration of applications in the _pro_ version can take a few minutes, since it retrieves all version information and metadata upfront.
+
+=== Preparation
+You need to prepare a values file named `scdf-values.yml`.
+The following steps will help.
+
+==== Prepare Configuration parameters
+
+Executing the following script configures the environment variables needed:
+
+[source,shell]
+....
+source ./carvel/start-deploy.sh <broker> [scdf-type] [namespace] [release|snapshot]
+....
+
+Where:
+
+* `broker` is one of rabbitmq or kafka.
+* `namespace` is a valid Kubernetes namespace other than `default`.
+* `scdf-type` is one of oss or pro. oss is the default.
+* `release|snapshot` and `scdf-type` determine the value of `PACKAGE_VERSION`.
+
+_*The best way to ensure that the intended package type and version are used is to modify `deploy/versions.yaml`.*_
+
+The environment variables can also be configured manually to override the values.
+
+[cols="3m,6,2"]
+|===
+|Name |Description|Default
+
+|PACKAGE_VERSION
+|Version of Carvel package.
+| Release version
+
+|DATAFLOW_VERSION
+|Version of Spring Cloud Data Flow
+|2.11.2
+
+|DATAFLOW_PRO_VERSION
+|Version of Spring Cloud Data Flow Pro
+|1.6.1
+
+|SKIPPER_VERSION
+|Version of Spring Cloud Skipper
+|2.11.2
+
+|REGISTRY
+|URL and repository of the package registry. Format ``. This is used to prefix the Carvel repo and package.
+| `docker.io/springcloud`
+
+| BROKER
+| One of `kafka` or `rabbitmq`
+| `rabbitmq`
+
+| DATABASE
+| One of `mariadb` or `postgresql`. This applies only when you run `deploy-local-database.sh`.
+|`postgresql`
+
+| NS
+| A Kubernetes namespace other than `default`.
+| `scdf`
+
+| SCDF_TYPE
+| One of `oss` or `pro`.
+| `oss`
+
+|===
+
+NOTE: The above environment variables need to be provided only if they differ from the defaults in `deploy/versions.yaml`.
+
+==== Prepare Configuration file
+
+Create a file named `scdf-values.yml` by executing:
+
+[source,shell]
+....
+./carvel/carvel-use-template.sh
+....
+
+Edit the file as needed to configure the deployment. The `deploy-local-*` scripts update this file as supporting services are installed.
+
+_The scdf-type selected previously is used._
+
+=== Prepare cluster and add repository
+
+Log in to Docker Hub and, when deploying Spring Cloud Data Flow Pro, to registry.packages.broadcom.com:
+
+[source,shell]
+....
+# When deploying SCDF Pro.
+export TANZU_DOCKER_USERNAME=""
+export TANZU_DOCKER_PASSWORD=""
+docker login --username $TANZU_DOCKER_USERNAME --password $TANZU_DOCKER_PASSWORD registry.packages.broadcom.com

+# Always required to ensure you don't experience rate limiting with Docker Hub
+export DOCKER_HUB_USERNAME=""
+export DOCKER_HUB_PASSWORD=""
+docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD index.docker.io
+....
+
+Install the Carvel kapp-controller, secretgen-controller and cert-manager:
+
+[source,shell]
+....
+./carvel/prepare-cluster.sh
+....
+
+Load the SCDF package repository for the _scdf-type_:
+[source,shell]
+....
+./carvel/setup-scdf-repo.sh
+....
+
+=== Install supporting services
+
+In a production environment, you should use supported database and broker services or operators, along with shared observability tools.
+
+For local development or demonstration, the following scripts can be used to install a database, a message broker, and Prometheus.
+
+==== Deploy local database.
+
+[source,shell]
+....
+./carvel/deploy-local-database.sh # <1>
+....
+<1> The database must be one of `postgresql` or `mariadb`. The default is `postgresql`; it can be configured through `DATABASE` when using `start-deploy.sh`.
+
+NOTE: This script updates `scdf-values.yml` with the correct secret name.
+
+==== Deploy local message broker.
+[source,shell]
+....
+./carvel/deploy-local-broker.sh
+....
+
+==== Deploy local Prometheus and proxy.
+[source,shell]
+....
+./carvel/deploy-local-prometheus.sh
+....
+
+_This script also configures the Grafana endpoint in `scdf-values.yml`._
+
+=== Configure Prometheus proxy
+
+Where an existing Prometheus and Prometheus proxy are already deployed, the proxy can be configured using:
+
+[source,shell]
+....
+./carvel/configure-prometheus-proxy.sh [step]
+....
+
+=== Deploy Spring Cloud Data Flow
+
+You can configure the following before running `register-apps.sh`:
+
+* `STREAM_APPS_RT_VERSION` Stream Apps Release Train Version. _Default is 2022.0.0_.
+* `STREAM_APPS_VERSION` Stream Apps Version. _Default is 4.0.0_.
+
+[source,shell]
+....
+./carvel/deploy-scdf.sh
+source ./carvel/export-dataflow-ip.sh
+# expected output: Dataflow URL:
+./carvel/register-apps.sh
+....
+
+=== Update deployed application.
+ +You can modify the values file used during installation and then update the deployment using `./carvel/update-scdf.sh` diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc index 69574cbc08..1d7b91662f 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc @@ -149,13 +149,13 @@ are `http` (the default), `port`, and `none`. You can also set environment variables that specify the HTTP-based health check endpoint and timeout: `SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_HEALTH_CHECK_ENDPOINT` and `SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_HEALTH_CHECK_TIMEOUT`, respectively. These default to `/health` (the Spring Boot default location) and `120` seconds. -* You can also specify deployment properties by using the DSL. For instance, if you want to set the allocated memory for the `http` application to 512m and also bind a mysql service to the `jdbc` application, you can run the following commands: +* You can also specify deployment properties by using the DSL. For instance, if you want to set the allocated memory for the `http` application to 512m and also bind a postgres service to the `jdbc` application, you can run the following commands: [source,bash,subs=attributes] ---- -dataflow:> stream create --name mysqlstream --definition "http | jdbc --tableName=names --columns=name" -dataflow:> stream deploy --name mysqlstream --properties "deployer.http.memory=512, deployer.jdbc.cloudfoundry.services=mysql" +dataflow:> stream create --name postgresstream --definition "http | jdbc --tableName=names --columns=name" +dataflow:> stream deploy --name postgresstream --properties "deployer.http.memory=512, deployer.jdbc.cloudfoundry.services=postgres" ---- @@ -206,7 +206,7 @@ spring: memory: 512m disk: 2048m instances: 4 - services: rabbit,mysql + services: rabbit,postgres appNamePrefix: dev1 qa: connection: @@ -221,7 +221,7 @@ spring: memory: 756m disk: 724m instances: 2 - services: rabbitQA,mysqlQA + services: rabbitQA,postgresQA appNamePrefix: qa1 ---- @@ -287,15 +287,16 @@ cf set-env dataflow-server SPRING_APPLICATION_JSON '{"spring.cloud.dataflow.appl ---- -For Spring Cloud Task apps, you can use something similar to the following, if you use a database service instance named `mysql`: +For Spring Cloud Task apps, you can use something similar to the following, if you use a database service instance named `postgres`: [source,bash,subs=attributes] ---- -cf set-env SPRING_DATASOURCE_URL '${vcap.services.mysql.credentials.jdbcUrl}' -cf set-env SPRING_DATASOURCE_USERNAME '${vcap.services.mysql.credentials.username}' -cf set-env SPRING_DATASOURCE_PASSWORD '${vcap.services.mysql.credentials.password}' +cf set-env SPRING_DATASOURCE_URL '${vcap.services.postgres.credentials.jdbcUrl}' +cf set-env SPRING_DATASOURCE_USERNAME '${vcap.services.postgres.credentials.username}' +cf set-env SPRING_DATASOURCE_PASSWORD '${vcap.services.postgres.credentials.password}' cf set-env SPRING_DATASOURCE_DRIVER_CLASS_NAME 'org.mariadb.jdbc.Driver' +cf set-env SPRING_JPA_DATABASE_PLATFORM 'org.hibernate.dialect.MariaDB106Dialect' ---- @@ -322,23 +323,23 @@ SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_US When deploying streams in Cloud Foundry, you can take advantage of application-specific 
service bindings, so not all services are globally configured for all the apps orchestrated by Spring Cloud Data Flow. -For instance, if you want to provide a `mysql` service binding only for the `jdbc` application in the following stream +For instance, if you want to provide a `postgres` service binding only for the `jdbc` application in the following stream definition, you can pass the service binding as a deployment property: [source,bash,subs=attributes] ---- dataflow:>stream create --name httptojdbc --definition "http | jdbc" -dataflow:>stream deploy --name httptojdbc --properties "deployer.jdbc.cloudfoundry.services=mysqlService" +dataflow:>stream deploy --name httptojdbc --properties "deployer.jdbc.cloudfoundry.services=postgresService" ---- -where `mysqlService` is the name of the service specifically bound only to the `jdbc` application and the `http` +where `postgresService` is the name of the service specifically bound only to the `jdbc` application and the `http` application does not get the binding by this method. If you have more than one service to bind, they can be passed as comma-separated items -(for example: `deployer.jdbc.cloudfoundry.services=mysqlService,someService`). +(for example: `deployer.jdbc.cloudfoundry.services=postgresService,someService`). [[configure-service-binding-parameters]] === Configuring Service binding parameters @@ -372,7 +373,7 @@ Since a comma is also used to separate configuration parameters, and to avoid wh [source] ---- -rabbitmq,'nfs_service_instance uid:1000,gid:1000,mount:/var/volume1,readonly:true',mysql,'my-google-bigquery-example role:bigquery.user' +rabbitmq,'nfs_service_instance uid:1000,gid:1000,mount:/var/volume1,readonly:true',postgres,'my-google-bigquery-example role:bigquery.user' ---- [TIP] @@ -505,10 +506,6 @@ The following example shows how to deploy the `http` health check type to an end Though we recommend using a Maven Artifactory for application <>, there might be situations where one of the following alternative approaches would make sense. -* We have custom-built and maintain a link:https://github.com/spring-cloud-stream-app-starters/scdf-app-tool[SCDF APP Tool] -that can run as a regular Spring Boot application in Cloud Foundry, but it will in turn host and serve the application -JARs for SCDF at runtime. - * With the help of Spring Boot, we can serve link:https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-developing-web-applications.html#boot-features-spring-mvc-static-content[static content] in Cloud Foundry. A simple Spring Boot application can bundle all the required stream and task applications. By having it run on Cloud Foundry, the static application can then serve the über-jar's. From the shell, you can, for example, register the @@ -742,17 +739,17 @@ logging.level.cloudfoundry-client == DEBUG === Spring Cloud Config Server -You can use Spring Cloud Config Server to centralize configuration properties for Spring Boot applications. Likewise, -both Spring Cloud Data Flow and the applications orchestrated by Spring Cloud Data Flow can be integrated with -a configuration server to use the same capabilities. +You can use Spring Cloud Config Server to centralize configuration properties for Spring Boot applications. +Likewise, both Spring Cloud Data Flow and the applications orchestrated by Spring Cloud Data Flow can be integrated with a configuration server to use the same capabilities. 
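+
+For example, a minimal sketch (the application name `dataflow-server` matches the manifests in this section; the config server URL is a placeholder) of pointing the Data Flow server at a config server by using the standard client property:
+
+[source,bash,subs=attributes]
+----
+# spring.cloud.config.uri expressed as an environment variable
+cf set-env dataflow-server SPRING_CLOUD_CONFIG_URI https://my-config-server.example.com
+# restage so the new environment takes effect
+cf restage dataflow-server
+----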
==== Stream, Task, and Spring Cloud Config Server Similar to Spring Cloud Data Flow server, you can configure both the stream and task applications to resolve the centralized properties from the configuration server. Setting the `spring.cloud.config.uri` property for the deployed applications is a common way to bind to the configuration server. See the link:https://cloud.spring.io/spring-cloud-config/spring-cloud-config.html#_spring_cloud_config_client[Spring Cloud Config Client] reference guide for more information. -Since this property is likely to be used across all applications deployed by the Data Flow server, the Data Flow server's `spring.cloud.dataflow.applicationProperties.stream` property for stream applications and `spring.cloud.dataflow.applicationProperties.task` property for task applications can be used to pass the `uri` of the Config Server to each deployed stream or task application. See the section on <> for more information. -Note that, if you use applications from the link:https://cloud.spring.io/spring-cloud-stream-app-starters/[App Starters project], these applications already embed the `spring-cloud-services-starter-config-client` dependency. +Since this property is likely to be used across all deployed applications, the Data Flow server's `spring.cloud.dataflow.applicationProperties.stream` property for stream applications and `spring.cloud.dataflow.applicationProperties.task` property for task applications can be used to pass the `uri` of the Config Server to each deployed stream or task application. See the section on <> for more information. + +Note that, if you use the out-of-the-box link:https://spring.io/projects/spring-cloud-stream-applications/[Stream Applications], these applications already embed the `spring-cloud-services-starter-config-client` dependency. If you build your application from scratch and want to add the client side support for config server, you can add a dependency reference to the config server client library. 
The following snippet shows a Maven example: @@ -780,10 +777,9 @@ If you know that you are not using config server functionality, you can disable The following SCDF and Skipper `manifest.yml` templates includes the required environment variables for the Skipper and Spring Cloud Data Flow server and deployed applications and tasks to successfully run on Cloud Foundry and automatically resolve centralized properties from `my-config-server` at runtime: - -[source,yml] +.SCDF manifest.yml +[source,yaml] ---- ---- applications: - name: data-flow-server host: data-flow-server @@ -793,7 +789,7 @@ applications: path: {PATH TO SERVER UBER-JAR} env: SPRING_APPLICATION_NAME: data-flow-server - MAVEN_REMOTE_REPOSITORIES_REPO1_URL: https://repo.spring.io/libs-snapshot + MAVEN_REMOTEREPOSITORIES_REPO1_URL: https://my.custom.repo/prod-repo SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_URL: https://api.sys.huron.cf-app.com SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_ORG: sabby20 SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SPACE: sabby20 @@ -801,13 +797,16 @@ applications: SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_USERNAME: admin SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: *** SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SKIP_SSL_VALIDATION: true - SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: mysql + SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: postgres SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI: https:///api services: -- mysql +- postgres - my-config-server +---- ---- +.Skipper manifest.yml +[source,yaml] +---- applications: - name: skipper-server host: skipper-server @@ -829,11 +828,10 @@ applications: SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: admin SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SKIP_SSL_VALIDATION: false SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_DELETE_ROUTES: false - SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: rabbit, my-config-server + SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: rabbit,my-config-server services: -- mysql +- postgres my-config-server - ---- where `my-config-server` is the name of the Spring Cloud Config Service instance running on Cloud Foundry. 
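+With both manifests in place (the file names below are hypothetical; use whatever names you saved them under), each server can be pushed in the usual way:
+
+[source,bash,subs=attributes]
+----
+cf push -f scdf-manifest.yml
+cf push -f skipper-manifest.yml
+----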
@@ -857,7 +855,6 @@ get their configuration from the `my-config-server` Cloud Config server (deploye [source,yml,options="wrap"] ---- ---- applications: - name: test-server host: test-server @@ -867,7 +864,7 @@ applications: path: spring-cloud-dataflow-server-VERSION.jar env: SPRING_APPLICATION_NAME: test-server - MAVEN_REMOTE_REPOSITORIES_REPO1_URL: https://repo.spring.io/libs-snapshot + MAVEN_REMOTEREPOSITORIES_REPO1_URL: https://my.custom.repo/prod-repo SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_URL: https://api.sys.huron.cf-app.com SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_ORG: sabby20 SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SPACE: sabby20 @@ -875,22 +872,19 @@ applications: SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_USERNAME: admin SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: *** SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SKIP_SSL_VALIDATION: true - SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: mysql, config-server + SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: postgres, config-server SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI: https:///api TRUST_CERTS: #this is for the server SPRING_CLOUD_DATAFLOW_APPLICATION_PROPERTIES_TASK_TRUST_CERTS: #this propagates to all tasks services: -- mysql +- postgres - my-config-server #this is for the server ---- - Also add the `my-config-server` service to the Skipper's manifest environment - [source,yml] ---- ---- applications: - name: skipper-server host: skipper-server @@ -912,9 +906,8 @@ applications: SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: rabbit, my-config-server #this is so all stream applications bind to my-config-server services: -- mysql +- postgres my-config-server - ---- @@ -945,7 +938,6 @@ The following sample manifest shows both environment properties configured (assu [source,yml] ---- ---- applications: - name: data-flow-server host: data-flow-server @@ -967,7 +959,7 @@ applications: SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI: https:///api SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_SCHEDULER_SCHEDULER_URL: https://scheduler.local.pcfdev.io services: -- mysql +- postgres ---- Where the `SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_SCHEDULER_SCHEDULER_URL` has the following format: `scheduler.` (for diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-database.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-database.adoc new file mode 100644 index 0000000000..f53ef92687 --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-database.adoc @@ -0,0 +1,88 @@ + +[[configuration-database-overview]] +A relational database is used to store stream and task definitions as well as the state of executed tasks. +Spring Cloud Data Flow provides schemas for *MariaDB*, *MySQL*, *Oracle*, *PostgreSQL*, *Db2*, *SQL Server*, and *H2*. The schema is automatically created when the server starts. + +NOTE: The JDBC drivers for *MariaDB*, *MySQL* (via the _MariaDB_ driver), *PostgreSQL*, *SQL Server* are available without additional configuration. 
To use any other database, you need to put the corresponding JDBC driver jar on the classpath of the server, as described <<#add-custom-driver,here>>.
+
+To configure a database, the following properties must be set:
+
+* `spring.datasource.url`
+* `spring.datasource.username`
+* `spring.datasource.password`
+* `spring.datasource.driver-class-name`
+* `spring.jpa.database-platform`
+
+The `username` and `password` properties are set the same way regardless of the database. However, the `url`, `driver-class-name`, and `database-platform` vary per database, as follows.
+
+[frame="none"]
+[cols="a,a,a,a,a"]
+[cols="10%,20%,20%,20%,10%"]
+|===
+|[.small]#Database#|[.small]#spring.datasource.url#|[.small]#spring.datasource.driver-class-name#|[.small]#spring.jpa.database-platform#|[.small]#Driver included#
+
+|[.small]#MariaDB 10.0 - 10.1#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB10Dialect#
+|[.small]#Yes#
+
+|[.small]#MariaDB 10.2#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB102Dialect#
+|[.small]#Yes#
+
+|[.small]#MariaDB 10.3 - 10.5#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB103Dialect#
+|[.small]#Yes#
+
+|[.small]#MariaDB 10.6+#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB106Dialect#{empty}footnote:[If the database was migrated from MySQL and uses sequence tables, use `org.hibernate.dialect.MariaDB102Dialect`. Please note that Hibernate selects an incorrect dialect when using MariaDB 11.]
+|[.small]#Yes#
+
+|[.small]#MySQL 5.7#
+|[.small]#jdbc:mysql://${db-hostname}:${db-port}/${db-name}?permitMysqlScheme#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MySQL57Dialect#
+|[.small]#Yes#
+
+|[.small]#MySQL 8.0+#
+|[.small]#jdbc:mysql://${db-hostname}:${db-port}/${db-name}?allowPublicKeyRetrieval=true&useSSL=false&autoReconnect=true&permitMysqlScheme#{empty}footnote:[SSL is disabled in this example; adjust accordingly for your environment and requirements.]
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MySQL8Dialect#
+|[.small]#Yes#
+
+|[.small]#PostgreSQL#
+|[.small]#jdbc:postgresql://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.postgresql.Driver#
+|[.small]#Remove for Hibernate default#
+|[.small]#Yes#
+
+|[.small]#SQL Server#
+|[.small]#jdbc:sqlserver://${db-hostname}:${db-port};databaseName=${db-name};encrypt=false#
+|[.small]#com.microsoft.sqlserver.jdbc.SQLServerDriver#
+|[.small]#Remove for Hibernate default#
+|[.small]#Yes#
+
+|[.small]#DB2#
+|[.small]#jdbc:db2://${db-hostname}:${db-port}/${db-name}#
+|[.small]#com.ibm.db2.jcc.DB2Driver#
+|[.small]#Remove for Hibernate default#
+|[.small]#No#
+
+|[.small]#Oracle#
+|[.small]#jdbc:oracle:thin:@${db-hostname}:${db-port}/${db-name}#
+|[.small]#oracle.jdbc.OracleDriver#
+|[.small]#Remove for Hibernate default#
+|[.small]#No#
+|===
+
+==== H2
+When no other database is configured, Spring Cloud Data Flow uses an embedded instance of the *H2* database as the default.
+
+NOTE: *H2* is good for development purposes but is neither recommended for production use nor supported in external mode.
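+
+As an illustration, a minimal sketch of starting the server against MariaDB (the hostname, database name, and credentials here are placeholders):
+
+[source,shell]
+----
+java -jar spring-cloud-dataflow-server.jar \
+    --spring.datasource.url='jdbc:mariadb://localhost:3306/dataflow' \
+    --spring.datasource.username=scdf \
+    --spring.datasource.password=secret \
+    --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver \
+    --spring.jpa.database-platform=org.hibernate.dialect.MariaDB106Dialect
+----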
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes-app-properties.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes-app-properties.adoc
new file mode 100644
index 0000000000..8aa0be731a
--- /dev/null
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes-app-properties.adoc
@@ -0,0 +1,1173 @@
+
+=== Application and Server Properties
+
+This section covers how you can customize the deployment of your applications. You can use a number of properties to influence settings for the applications that are deployed. Properties can be applied on a per-application basis or in the appropriate server configuration for all deployed applications.
+
+NOTE: Properties set on a per-application basis always take precedence over properties set as the server configuration. This arrangement lets you override global server level properties on a per-application basis.
+
+Properties to be applied for all deployed Tasks are defined in the `src/kubernetes/server/server-config-[binder].yaml` file and for Streams in `src/kubernetes/skipper/skipper-config-[binder].yaml`. Replace `[binder]` with the messaging middleware you are using -- for example, `rabbit` or `kafka`.
+
+==== Memory and CPU Settings
+
+Applications are deployed with default memory and CPU settings. If you need to, you can adjust these values. The following example shows how to set `Limits` to `1000m` for CPU and `1024Mi` for memory, and `Requests` to `800m` for CPU and `640Mi` for memory:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.limits.cpu=1000m
+deployer.<app>.kubernetes.limits.memory=1024Mi
+deployer.<app>.kubernetes.requests.cpu=800m
+deployer.<app>.kubernetes.requests.memory=640Mi
+----
+====
+
+Those values result in the following container settings being used:
+
+====
+[source]
+----
+Limits:
+  cpu: 1
+  memory: 1Gi
+Requests:
+  cpu: 800m
+  memory: 640Mi
+----
+====
+
+You can also control the default values for `cpu` and `memory` globally.
+
+The following example shows how to set the CPU and memory for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    limits:
+                      memory: 640Mi
+                      cpu: 500m
+----
+====
+
+The following example shows how to set the CPU and memory for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    limits:
+                      memory: 640Mi
+                      cpu: 500m
+----
+====
+
+The settings we have used so far affect only the settings for the container. They do not affect the memory setting for the JVM process in the container. If you would like to set JVM memory settings, you can set an environment variable to do so. See the next section for details.
+
+==== Environment Variables
+
+To influence the environment settings for a given application, you can use the `spring.cloud.deployer.kubernetes.environmentVariables` deployer property.
+For example, a common requirement in production settings is to influence the JVM memory arguments.
+You can do so by using the `JAVA_TOOL_OPTIONS` environment variable, as the following example shows:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.environmentVariables=JAVA_TOOL_OPTIONS=-Xmx1024m
+----
+====
+
+NOTE: The `environmentVariables` property accepts a comma-delimited string.
If an environment variable contains a value
+that is also a comma-delimited string, it must be enclosed in single quotation marks -- for example,
+`spring.cloud.deployer.kubernetes.environmentVariables=spring.cloud.stream.kafka.binder.brokers='somehost:9092,
+anotherhost:9093'`
+
+This overrides the JVM memory setting for the desired `<app>` (replace `<app>` with the name of your application).
+
+[[getting-started-kubernetes-probes]]
+==== Liveness, Readiness and Startup Probes
+
+The `liveness` and `readiness` probes use paths called `/health/liveness` and `/health/readiness`, respectively. They both use a `delay` of `1`, with a `period` of `60` and `10`, respectively. You can change these defaults when you deploy the stream by using deployer properties. The liveness and readiness probes are applied only to streams.
+
+The `startup` probe uses the `/health` path with a `delay` of `30`, a `period` of `3`, and a failure threshold of `20` before the container restarts the application.
+
+The following example changes the `liveness`, `readiness`, and `startup` probes (replace `<app>` with the name of your application) by setting deployer properties:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.livenessProbePath=/health/liveness
+deployer.<app>.kubernetes.livenessProbeDelay=1
+deployer.<app>.kubernetes.livenessProbePeriod=60
+deployer.<app>.kubernetes.livenessProbeSuccess=1
+deployer.<app>.kubernetes.livenessProbeFailure=3
+deployer.<app>.kubernetes.readinessProbePath=/health/readiness
+deployer.<app>.kubernetes.readinessProbeDelay=1
+deployer.<app>.kubernetes.readinessProbePeriod=60
+deployer.<app>.kubernetes.readinessProbeSuccess=1
+deployer.<app>.kubernetes.readinessProbeFailure=3
+deployer.<app>.kubernetes.startupHttpProbePath=/health
+deployer.<app>.kubernetes.startupProbeDelay=20
+deployer.<app>.kubernetes.startupProbeSuccess=1
+deployer.<app>.kubernetes.startupProbeFailure=30
+deployer.<app>.kubernetes.startupProbePeriod=5
+deployer.<app>.kubernetes.startupProbeTimeout=3
+----
+====
+
+You can declare the same as part of the server global configuration for streams, as the following example shows:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    livenessHttpProbePath: /health/liveness
+                    livenessProbeDelay: 1
+                    livenessProbePeriod: 60
+                    livenessProbeSuccess: 1
+                    livenessProbeFailure: 3
+                    startupHttpProbePath: /health
+                    startupProbeDelay: 20
+                    startupProbeSuccess: 1
+                    startupProbeFailure: 30
+                    startupProbePeriod: 5
+                    startupProbeTimeout: 3
+----
+====
+
+Similarly, you can swap `liveness` for `readiness` to override the default `readiness` settings.
+
+By default, port 8080 is used as the probe port. You can change the defaults for both `liveness` and `readiness` probe ports by using deployer properties, as the following example shows:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.readinessProbePort=7000
+deployer.<app>.kubernetes.livenessProbePort=7000
+deployer.<app>.kubernetes.startupProbePort=7000
+----
+====
+
+You can declare the same as part of the global configuration for streams, as the following example shows:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    readinessProbePort: 7000
+                    livenessProbePort: 7000
+                    startupProbePort: 7000
+----
+====
+
+[NOTE]
+=====
+By default, the `liveness` and `readiness` probe paths use Spring Boot 2.x+ actuator endpoints.
To use Spring Boot 1.x actuator endpoint paths, you must adjust the `liveness` and `readiness` values accordingly (replace `<app>` with the name of your application).
+
+The `startup` probe path defaults to the management path `/info` but may be modified as needed:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.startupProbePath=/api
+----
+====
+
+To automatically set both `liveness` and `readiness` endpoints on a per-application basis to the default Spring Boot 1.x paths, you can set the following property:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.bootMajorVersion=1
+----
+====
+
+=====
+
+You can access secured probe endpoints by using credentials stored in a https://kubernetes.io/docs/concepts/configuration/secret/[Kubernetes secret]. You can use an existing secret, provided the credentials are contained under the `credentials` key name of the secret's `data` block. You can configure probe authentication on a per-application basis. When enabled, it is applied to both the `liveness` and `readiness` probe endpoints by using the same credentials and authentication type. Currently, only `Basic` authentication is supported.
+
+To create a new secret:
+
+. Generate the base64 string with the credentials used to access the secured probe endpoints.
++
+Basic authentication encodes a username and a password as a base64 string in the format of `username:password`.
++
+The following example (which includes output and in which you should replace `user` and `pass` with your values) shows how to generate a base64 string:
++
+====
+[source,shell]
+----
+$ echo -n "user:pass" | base64
+dXNlcjpwYXNz
+----
+====
+
+. With the encoded credentials, create a file (for example, `myprobesecret.yml`) with the following contents:
++
+====
+[source]
+----
+apiVersion: v1
+kind: Secret
+metadata:
+  name: myprobesecret
+type: Opaque
+data:
+  credentials: GENERATED_BASE64_STRING
+----
+====
+
+. Replace `GENERATED_BASE64_STRING` with the base64-encoded value generated earlier.
+
+. Create the secret by using `kubectl`, as the following example shows:
++
+====
+[source,shell]
+----
+$ kubectl create -f ./myprobesecret.yml
+secret "myprobesecret" created
+----
+====
+
+. Set the following deployer properties to use authentication when accessing probe endpoints, as the following example shows:
++
+====
+[source]
+----
+deployer.<app>.kubernetes.probeCredentialsSecret=myprobesecret
+----
+====
++
+Replace `<app>` with the name of the application to which to apply authentication.
+
+==== Using `SPRING_APPLICATION_JSON`
+
+You can use a `SPRING_APPLICATION_JSON` environment variable to set Data Flow server properties (including the configuration of Maven repository settings) that are common across all of the Data Flow server implementations. These settings go at the server level in the container `env` section of a deployment YAML. The following example shows how to do so:
+
+====
+[source,options=nowrap]
+----
+env:
+- name: SPRING_APPLICATION_JSON
+  value: "{ \"maven\": { \"local-repository\": null, \"remote-repositories\": { \"repo1\": { \"url\": \"https://my.custom.repo/prod-repo\"} } } }"
+----
+====
+
+==== Private Docker Registry
+
+You can pull Docker images from a private registry on a per-application basis. First, you must create a secret in the cluster. Follow the https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/[Pull an Image from a Private Registry] guide to create the secret.
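+
+As a point of reference, a sketch of creating such a secret directly with `kubectl` (the registry host and credentials here are placeholders):
+
+====
+[source,shell]
+----
+kubectl create secret docker-registry mysecret \
+    --docker-server=registry.example.com \
+    --docker-username=myuser \
+    --docker-password=mypassword \
+    --docker-email=myuser@example.com
+----
+====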
+
+Once you have created the secret, you can use the `imagePullSecret` property to set the secret to use, as the following example shows:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.imagePullSecret=mysecret
+----
+====
+
+Replace `<app>` with the name of your application and `mysecret` with the name of the secret you created earlier.
+
+You can also configure the image pull secret at the global server level.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    imagePullSecret: mysecret
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    imagePullSecret: mysecret
+----
+====
+
+Replace `mysecret` with the name of the secret you created earlier.
+
+==== Annotations
+
+You can add annotations to Kubernetes objects on a per-application basis. The supported object types are pod `Deployment`, `Service`, and `Job`. Annotations are defined in a `key:value` format, allowing for multiple annotations separated by a comma. For more information and use cases on annotations, see https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/[Annotations].
+
+The following example shows how you can configure applications to use annotations:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.podAnnotations=annotationName:annotationValue
+deployer.<app>.kubernetes.serviceAnnotations=annotationName:annotationValue,annotationName2:annotationValue2
+deployer.<app>.kubernetes.jobAnnotations=annotationName:annotationValue
+----
+====
+
+Replace `<app>` with the name of your application and set the values of your annotations.
+
+==== Entry Point Style
+
+An entry point style affects how application properties are passed to the container to be deployed. Currently, three styles are supported:
+
+* `exec` (default): Passes all application properties and command line arguments in the deployment request as container arguments. Application properties are transformed into the format of `--key=value`.
+* `shell`: Passes all application properties and command line arguments as environment variables. Each of the application or command-line argument properties is transformed into an uppercase string and `.` characters are replaced with `_`.
+* `boot`: Creates an environment variable called `SPRING_APPLICATION_JSON` that contains a JSON representation of all application properties. Command line arguments from the deployment request are set as container args.
+
+NOTE: In all cases, environment variables defined at the server-level configuration and on a per-application basis are sent on to the container as is.
+
+You can configure an application as follows:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.entryPointStyle=<entryPointStyle>
+----
+====
+
+Replace `<app>` with the name of your application and `<entryPointStyle>` with your desired entry point style.
+
+You can also configure the entry point style at the global server level.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    entryPointStyle: entryPointStyle
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    entryPointStyle: entryPointStyle
+----
+====
+
+Replace `entryPointStyle` with the desired entry point style.
+
+You should choose an Entry Point Style of either `exec` or `shell`, to correspond to how the `ENTRYPOINT` syntax is defined in the container's `Dockerfile`. For more information and use cases on `exec` versus `shell`, see the https://docs.docker.com/engine/reference/builder/#entrypoint[ENTRYPOINT] section of the Docker documentation.
+
+Using the `boot` entry point style corresponds to using the `exec` style `ENTRYPOINT`. Command line arguments from the deployment request are passed to the container, with the addition of application properties being mapped into the `SPRING_APPLICATION_JSON` environment variable rather than command line arguments.
+
+NOTE: When you use the `boot` Entry Point Style, the `deployer.<app>.kubernetes.environmentVariables` property must not contain `SPRING_APPLICATION_JSON`.
+
+==== Deployment Service Account
+
+You can configure a custom service account for application deployments through properties. You can use an existing service account or create a new one. One way to create a service account is by using `kubectl`, as the following example shows:
+
+====
+[source,shell]
+----
+$ kubectl create serviceaccount myserviceaccountname
+serviceaccount "myserviceaccountname" created
+----
+====
+
+Then you can configure individual applications as follows:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.deploymentServiceAccountName=myserviceaccountname
+----
+====
+
+Replace `<app>` with the name of your application and `myserviceaccountname` with your service account name.
+
+You can also configure the service account name at the global server level.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    deploymentServiceAccountName: myserviceaccountname
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    deploymentServiceAccountName: myserviceaccountname
+----
+====
+
+Replace `myserviceaccountname` with the service account name to be applied to all deployments.
+
+==== Image Pull Policy
+
+An image pull policy defines when a Docker image should be pulled to the local registry. Currently, three policies are supported:
+
+* `IfNotPresent` (default): Do not pull an image if it already exists.
+* `Always`: Always pull the image regardless of whether it already exists.
+* `Never`: Never pull an image. Use only an image that already exists.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.imagePullPolicy=IfNotPresent
+----
+====
+
+Replace `<app>` with the name of your application and `IfNotPresent` with your desired image pull policy.
+
+You can configure an image pull policy at the global server level.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    imagePullPolicy: IfNotPresent
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    imagePullPolicy: Always
+----
+====
+
+Replace the `imagePullPolicy` value with your desired image pull policy.
+
+==== Deployment Labels
+
+You can set custom labels on objects related to https://kubernetes.io/docs/concepts/workloads/controllers/deployment/[Deployment]. See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/[Labels] for more information on labels. Labels are specified in `key:value` format.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.deploymentLabels=myLabelName:myLabelValue
+----
+====
+
+Replace `<app>` with the name of your application, `myLabelName` with your label name, and `myLabelValue` with the value of your label.
+
+Additionally, you can apply multiple labels, as the following example shows:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.deploymentLabels=myLabelName:myLabelValue,myLabelName2:myLabelValue2
+----
+====
+
+==== Tolerations
+
+Tolerations work with taints to ensure pods are not scheduled onto particular nodes.
+Tolerations are set into the pod configuration while taints are set onto nodes.
+See the https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/[Taints and Tolerations] section of the Kubernetes reference for more information.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.tolerations=[{key: 'mykey', operator: 'Equal', value: 'myvalue', effect: 'NoSchedule'}]
+----
+====
+
+Replace `<app>` with the name of your application and the key-value pairs according to your desired toleration configuration.
+
+You can configure tolerations at the global server level as well.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    tolerations:
+                      - key: mykey
+                        operator: Equal
+                        value: myvalue
+                        effect: NoSchedule
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    tolerations:
+                      - key: mykey
+                        operator: Equal
+                        value: myvalue
+                        effect: NoSchedule
+----
+====
+
+Replace the `tolerations` key-value pairs according to your desired toleration configuration.
+
+==== Secret References
+
+Secrets can be referenced and their entire data contents can be decoded and inserted into the pod environment as individual variables.
+See the https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/#configure-all-key-value-pairs-in-a-secret-as-container-environment-variables[Configure all key-value pairs in a Secret as container environment variables] section of the Kubernetes reference for more information.
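+
+For reference, a secret such as the `testsecret` used in the examples below could be created with `kubectl` (the key and value are placeholders):
+
+====
+[source,shell]
+----
+kubectl create secret generic testsecret \
+    --from-literal=SECRET_PASSWORD=s3cret
+----
+====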
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.secretRefs=testsecret
+----
+====
+
+You can also specify multiple secrets, as follows:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.secretRefs=[testsecret,anothersecret]
+----
+====
+
+Replace `<app>` with the name of your application and the `secretRefs` attribute with the appropriate values for your application environment and secret.
+
+You can configure secret references at the global server level as well.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    secretRefs:
+                      - testsecret
+                      - anothersecret
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    secretRefs:
+                      - testsecret
+                      - anothersecret
+----
+====
+
+Replace the items of `secretRefs` with one or more secret names.
+
+==== Secret Key References
+
+Secrets can be referenced and their decoded value can be inserted into the pod environment.
+See the https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables[Using Secrets as Environment Variables] section of the Kubernetes reference for more information.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.secretKeyRefs=[{envVarName: 'MY_SECRET', secretName: 'testsecret', dataKey: 'password'}]
+----
+====
+
+Replace `<app>` with the name of your application and the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your application environment and secret.
+
+You can configure secret key references at the global server level as well.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    secretKeyRefs:
+                      - envVarName: MY_SECRET
+                        secretName: testsecret
+                        dataKey: password
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    secretKeyRefs:
+                      - envVarName: MY_SECRET
+                        secretName: testsecret
+                        dataKey: password
+----
+====
+
+Replace the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your secret.
+
+==== ConfigMap References
+
+A ConfigMap can be referenced and its entire data contents can be inserted into the pod environment as individual variables.
+See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#configure-all-key-value-pairs-in-a-configmap-as-container-environment-variables[Configure all key-value pairs in a ConfigMap as container environment variables] section of the Kubernetes reference for more information.
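+
+Similarly, a ConfigMap such as the `testcm` used in the examples below could be created with `kubectl` (the key and value are placeholders):
+
+====
+[source,shell]
+----
+kubectl create configmap testcm \
+    --from-literal=platform=kubernetes
+----
+====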
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.configMapRefs=testcm
+----
+====
+
+You can also specify multiple ConfigMap instances, as follows:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.configMapRefs=[testcm,anothercm]
+----
+====
+
+Replace `<app>` with the name of your application and the `configMapRefs` attribute with the appropriate values for your application environment and ConfigMap.
+
+You can configure ConfigMap references at the global server level as well.
+
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapRefs:
+                      - testcm
+                      - anothercm
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapRefs:
+                      - testcm
+                      - anothercm
+----
+====
+
+Replace the items of `configMapRefs` with one or more ConfigMap names.
+
+==== ConfigMap Key References
+
+A ConfigMap can be referenced and its associated key value inserted into the pod environment.
+See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#define-container-environment-variables-using-configmap-data[Define container environment variables using ConfigMap data] section of the Kubernetes reference for more information.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.configMapKeyRefs=[{envVarName: 'MY_CM', configMapName: 'testcm', dataKey: 'platform'}]
+----
+====
+
+Replace `<app>` with the name of your application and the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your application environment and ConfigMap.
+
+You can configure ConfigMap references at the global server level as well.
+
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapKeyRefs:
+                      - envVarName: MY_CM
+                        configMapName: testcm
+                        dataKey: platform
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapKeyRefs:
+                      - envVarName: MY_CM
+                        configMapName: testcm
+                        dataKey: platform
+----
+====
+
+Replace the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your ConfigMap.
+
+==== Pod Security Context
+The pod https://kubernetes.io/docs/tasks/configure-pod-container/security-context/[security context] specifies security settings for a pod and its containers.
+
+More details for each configurable option can be found in the https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.24/#podsecuritycontext-v1-core[Pod Security Context] section of the Kubernetes API reference.
+
+The following example shows how you can configure the security context for an individual application pod:
+
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.podSecurityContext={runAsUser: 65534, fsGroup: 65534, supplementalGroups: [65534, 65535], seccompProfile: { type: 'RuntimeDefault' }}
+----
+====
+
+Replace `<app>` with the name of your application and any attributes with the appropriate values for your container environment.
+
+You can configure the pod security context at the global server level as well.
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    podSecurityContext:
+                      runAsUser: 65534
+                      fsGroup: 65534
+                      supplementalGroups: [65534,65535]
+                      seccompProfile:
+                        type: Localhost
+                        localhostProfile: my-profiles/profile-allow.json
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    podSecurityContext:
+                      runAsUser: 65534
+                      fsGroup: 65534
+                      supplementalGroups: [65534,65535]
+                      seccompProfile:
+                        type: Localhost
+                        localhostProfile: my-profiles/profile-allow.json
+----
+====
+
+Adjust the `podSecurityContext` attributes with the appropriate values for your container environment.
+
+
+==== Container Security Context
+The container https://kubernetes.io/docs/tasks/configure-pod-container/security-context/[security context] specifies security settings for an individual container.
+
+More details for each configurable option can be found in the https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.24/#securitycontext-v1-core[Container Security Context] section of the Kubernetes API reference.
+
+NOTE: The container security context is applied to all containers in your deployment unless they have their own security context already explicitly defined, including regular init containers, stateful set init containers, and additional containers.
+
+The following example shows how you can configure the security context for containers in an individual application pod:
+====
+[source,options=nowrap]
+----
+deployer.<app>.kubernetes.containerSecurityContext={allowPrivilegeEscalation: true, runAsUser: 65534}
+----
+====
+
+Replace `<app>` with the name of your application and any attributes with the appropriate values for your container environment.
+
+You can configure the container security context at the global server level as well.
+The following example shows how to do so for streams.
Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + skipper: + server: + platform: + kubernetes: + accounts: + default: + containerSecurityContext: + allowPrivilegeEscalation: true + runAsUser: 65534 +---- +==== + +The following example shows how to do so for tasks by editing the `server-config.yaml` file: +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + dataflow: + task: + platform: + kubernetes: + accounts: + default: + containerSecurityContext: + allowPrivilegeEscalation: true + runAsUser: 65534 +---- +==== + +Adjust the `containerSecurityContext` attributes with the appropriate values for your container environment. + + +==== Service Ports + +When you deploy applications, a Kubernetes Service object is created with a default port of `8080`. If the `server.port` property is set, it overrides the default port value. You can add additional ports to the Service object on a per-application basis, specifying multiple ports as a comma-delimited list. + +The following example shows how you can configure additional ports on a Service object for an application: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.servicePorts=5000 +deployer.<app>.kubernetes.servicePorts=5000,9000 +---- +==== + +Replace `<app>` with the name of your application and supply the values of your ports. + +==== StatefulSet Init Container + +When deploying an application by using a StatefulSet, an Init Container is used to set the instance index in the pod. +By default, the image used is `busybox`, which you can customize. + +The following example shows how you can individually configure application pods: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.statefulSetInitContainerImageName=myimage:mylabel +---- +==== + +Replace `<app>` with the name of your application and the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment. + +You can configure the StatefulSet Init Container at the global server level as well. + +The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + skipper: + server: + platform: + kubernetes: + accounts: + default: + statefulSetInitContainerImageName: myimage:mylabel +---- +==== + +The following example shows how to do so for tasks by editing the `server-config.yaml` file: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + dataflow: + task: + platform: + kubernetes: + accounts: + default: + statefulSetInitContainerImageName: myimage:mylabel +---- +==== + +Replace the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment. + +==== Init Containers + +When you deploy applications, you can set a custom Init Container on a per-application basis. +Refer to the https://kubernetes.io/docs/concepts/workloads/pods/init-containers/[Init Containers] section of the Kubernetes reference for more information.
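+
+In pod-spec terms, the `initContainer` property shown in the example below renders an entry under the pod's `initContainers`. A minimal sketch of the equivalent Kubernetes fragment (the image and command are illustrative):
+
+====
+[source,yaml]
+----
+spec:
+  initContainers:
+  - name: test
+    image: busybox:latest
+    command: ['sh', '-c', 'echo hello']
+----
+====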
+ +The following example shows how you can configure an Init Container or multiple Init Containers for an application: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.initContainer={containerName: 'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']} +# alternative for multiple init containers +deployer.<app>.kubernetes.initContainers=[{containerName:'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']}, {containerName:'test2', imageName:'busybox:latest', commands:['sh', '-c', 'echo world']}] +# multiple containers can be created individually +deployer.<app>.kubernetes.initContainers[0]={containerName:'test', imageName:'busybox:latest', commands:['sh', '-c', 'echo hello']} +deployer.<app>.kubernetes.initContainers[1]={containerName:'test2', imageName:'busybox:latest', commands:['sh', '-c', 'echo world']} +---- +==== + +Replace `<app>` with the name of your application and set the `initContainer` attributes to the values appropriate for your Init Container. + +==== Lifecycle Support + +When you deploy applications, you may attach `postStart` and `preStop` https://kubernetes.io/docs/tasks/configure-pod-container/attach-handler-lifecycle-event/[Lifecycle handlers] to execute commands. +The Kubernetes API supports other types of handlers besides `exec`. This feature may be extended to support additional actions in a future release. +To configure the Lifecycle handlers as shown in the linked page above, specify each command as a comma-delimited list, using the following property keys: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.lifecycle.postStart.exec.command=/bin/sh,-c,'echo Hello from the postStart handler > /usr/share/message' +deployer.<app>.kubernetes.lifecycle.preStop.exec.command=/bin/sh,-c,'nginx -s quit; while killall -0 nginx; do sleep 1; done' +---- +==== + +==== Additional Containers + +When you deploy applications, you may need one or more containers to be deployed along with the main container. +This lets you adopt deployment patterns such as sidecar or adapter in a multi-container pod setup. + +The following example shows how you can configure additional containers for an application: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.additionalContainers=[{name: 'c1', image: 'busybox:1', command: ['sh', '-c', 'echo hello1'], volumeMounts: [{name: 'test-volume', mountPath: '/tmp', readOnly: true}]},{name: 'c2', image: 'busybox:1.26.1', command: ['sh', '-c', 'echo hello2']}] +---- +==== diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc index 1e53a5ad9a..878d33d255 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc @@ -22,6 +22,9 @@ By default, all the features are enabled. The `/features` REST endpoint provides information on the features that have been enabled and disabled. +[[configuration-kubernetes-app-props]] +include::configuration-kubernetes-app-properties.adoc[] + [[configuration-kubernetes-deployer]] === Deployer Properties You can use the following configuration properties of the https://github.com/spring-cloud/spring-cloud-deployer-kubernetes[Kubernetes deployer] to customize how Streams and Tasks are deployed.
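These deployer properties can also be supplied per application at deployment time from the Data Flow shell. A minimal sketch (the stream name, application name, and value are illustrative):

====
[source,options=nowrap]
----
dataflow:> stream deploy --name ticktock --properties "deployer.time.kubernetes.limits.memory=512Mi"
----
====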
@@ -69,6 +72,26 @@ These properties are also used when configuring the < +|startupProbeDelay +|Delay in seconds when the Kubernetes startup check of the app container should start checking its health status. +|30 + +|startupProbePeriod +|Period in seconds for performing the Kubernetes startup check of the app container. +|3 + +|startupProbeFailure +|Number of probe failures allowed for the startup probe before the pod is restarted. +|20 + +|startupHttpProbePath +|Path that the app container has to respond to for the startup check. +| + +|startupProbePort +|Port that the app container has to respond on for the startup check. +| + |readinessProbeDelay |Delay in seconds when the readiness check of the app container should start checking if the module is fully up and running. |10 @@ -101,6 +124,18 @@ These properties are also used when configuring the < +|limits.ephemeral-storage +|The ephemeral-storage limit, maximum needed value to allocate a pod. +| + +|limits.hugepages-2Mi +|The hugepages-2Mi limit, maximum needed value to allocate a pod. +| + +|limits.hugepages-1Gi +|The hugepages-1Gi limit, maximum needed value to allocate a pod. +| + |requests.memory |The memory request, guaranteed needed value to allocate a pod. | @@ -109,6 +144,30 @@ These properties are also used when configuring the < +|requests.ephemeral-storage +|The ephemeral-storage request, guaranteed needed value to allocate a pod. +| + +|requests.hugepages-2Mi +|The hugepages-2Mi request, guaranteed needed value to allocate a pod. +| + +|requests.hugepages-1Gi +|The hugepages-1Gi request, guaranteed needed value to allocate a pod. +| + +|affinity.nodeAffinity +|The node affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { nodeSelectorTerms: [ { matchExpressions: [ { key: 'kubernetes.io/e2e-az-name', operator: 'In', values: [ 'e2e-az1', 'e2e-az2']}]}]}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, preference: { matchExpressions: [ { key: 'another-node-label-key', operator: 'In', values: [ 'another-node-label-value' ]}]}}]}``` +| + +|affinity.podAffinity +|The pod affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}, topologyKey: 'kubernetes.io/hostname'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}``` +| + +|affinity.podAntiAffinity +|The pod anti-affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}, topologyKey: 'kubernetes.io/hostname'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}``` +| + |statefulSet.volumeClaimTemplate.storageClassName |Name of the storage class for a stateful set | @@ -141,6 +200,14 @@ These properties are also used when configuring the < +|priorityClassName +|Pod Spec priorityClassName. Create a PriorityClass in Kubernetes before using this property.
See https://kubernetes.io/docs/concepts/scheduling-eviction/pod-priority-preemption/[Pod Priority and Preemption] +| + +|shareProcessNamespace +| Will assign value to Pod.spec.shareProcessNamespace. See https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/[Share Process Namespace between Containers in a Pod] +| + |minutesToWaitForLoadBalancer |Time to wait for load balancer to be available before attempting delete of service (in minutes). |5 @@ -250,27 +317,105 @@ These properties are also used when configuring the < |maximumConcurrentTasks -|The maximum concurrent tasks allowed for this platform instance. +|The maximum concurrent tasks allowed for this platform instance |20 +[[pod-security-context-props]] +|podSecurityContext +|The security context applied to the pod expressed in YAML format. e.g. ```{runAsUser: 65534, fsGroup: 65534, supplementalGroups: [65534, 65535], seccompProfile: { type: 'RuntimeDefault' }}```. Note this defines the entire pod security context - smaller portions of the security context can instead be configured via the `podSecurityContext.**` properties below. +| + |podSecurityContext.runAsUser |The numeric user ID to run pod container processes under | +|podSecurityContext.runAsGroup +|The numeric group id to run the entrypoint of the container process +| + +|podSecurityContext.runAsNonRoot +|Indicates that the container must run as a non-root user +| + |podSecurityContext.fsGroup -|The numeric group ID to run pod container processes under +|The numeric group ID for the volumes of the pod | -|affinity.nodeAffinity -|The node affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { nodeSelectorTerms: [ { matchExpressions: [ { key: 'kubernetes.io/e2e-az-name', operator: 'In', values: [ 'e2e-az1', 'e2e-az2']}]}]}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, preference: { matchExpressions: [ { key: 'another-node-label-key', operator: 'In', values: [ 'another-node-label-value' ]}]}}]}``` +|podSecurityContext.fsGroupChangePolicy +|Defines behavior of changing ownership and permission of the volume before being exposed inside pod (only applies to volume types which support fsGroup based ownership and permissions) - possible values are "OnRootMismatch", "Always" | -|affinity.podAffinity -|The pod affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: [ { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}], topologyKey: 'kubernetes.io/hostnam'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}``` +|podSecurityContext.supplementalGroups +|The numeric group IDs applied to the pod container processes, in addition to the container's primary group ID | -|affinity.podAntiAffinity -|The pod anti-affinity expressed in YAML format. e.g. 
```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}], topologyKey: 'kubernetes.io/hostname'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}``` +|podSecurityContext.seccompProfile +|The seccomp options to use for the pod containers expressed in YAML format. e.g. ```{ seccompProfile: { type: 'Localhost', localhostProfile: 'my-profiles/profile-allow.json' }}``` +| + +|podSecurityContext.seLinuxOptions +|The SELinux context to be applied to the pod containers expressed in YAML format. e.g. ```{ level: "s0:c123,c456" }``` (not used when spec.os.name is windows). +| + +|podSecurityContext.sysctls +|List of namespaced sysctls used for the pod expressed in YAML format. e.g. ```[{name: "kernel.shm_rmid_forced", value: 0}]``` (not used when spec.os.name is windows). +| + +|podSecurityContext.windowsOptions +|The Windows specific settings applied to all containers expressed in YAML format. e.g. ```{ gmsaCredentialSpec: "specA", gmsaCredentialSpecName: "specA-name"}``` (only used when spec.os.name is windows). +| + +[[container-security-context-props]] +|containerSecurityContext +|The security context applied to the containers expressed in YAML format. e.g. ```{allowPrivilegeEscalation: true, runAsUser: 65534}```. Note this defines the entire container security context - smaller portions of the security context can instead be configured via the `containerSecurityContext.**` properties below. +| + +|containerSecurityContext.allowPrivilegeEscalation +|Whether a process can gain more privileges than its parent process +| + +|containerSecurityContext.capabilities +|The capabilities to add/drop when running the container expressed in YAML format. e.g. ```{ add: [ "a", "b" ], drop: [ "c" ] }``` (only used when spec.os.name is not windows) +| + +|containerSecurityContext.privileged +|Run container in privileged mode. +| + +|containerSecurityContext.procMount +|The type of proc mount to use for the container (only used when spec.os.name is not windows) +| + +|containerSecurityContext.readOnlyRootFilesystem +|Mounts the container's root filesystem as read-only +| + +|containerSecurityContext.runAsUser +|The numeric user ID to run pod container processes under +| + +|containerSecurityContext.runAsGroup +|The numeric group id to run the entrypoint of the container process +| + +|containerSecurityContext.runAsNonRoot +|Indicates that the container must run as a non-root user +| + +|containerSecurityContext.seccompProfile +|The seccomp options to use for the pod containers expressed in YAML format. e.g. ```{ seccompProfile: { type: 'Localhost', localhostProfile: 'my-profiles/profile-allow.json' }}``` +| + +|containerSecurityContext.seLinuxOptions +|The SELinux context to be applied to the pod containers expressed in YAML format. e.g. ```{ level: "s0:c123,c456" }``` (not used when spec.os.name is windows). +| + +|containerSecurityContext.sysctls +|List of namespaced sysctls used for the pod expressed in YAML format. e.g. ```[{name: "kernel.shm_rmid_forced", value: 0}]``` (not used when spec.os.name is windows). +| + +|containerSecurityContext.windowsOptions +|The Windows specific settings applied to all containers expressed in YAML format. e.g. 
```{ gmsaCredentialSpec: "specA", gmsaCredentialSpecName: "specA-name"}``` (only used when spec.os.name is windows). | |statefulSetInitContainerImageName @@ -278,11 +423,11 @@ These properties are also used when configuring the < |initContainer -|An Init Container expressed in YAML format to be applied to a pod. e.g. ```{containerName: 'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']}``` +|An Init Container expressed in YAML format to be applied to a pod. e.g. ```{containerName: 'test', imageName: 'busybox:1', commands: ['sh', '-c', 'echo hello']}``` | |additionalContainers -|Additional containers expressed in YAML format to be applied to a pod. e.g. ```[{name: 'c1', image: 'busybox:latest', command: ['sh', '-c', 'echo hello1'], volumeMounts: [{name: 'test-volume', mountPath: '/tmp', readOnly: true}]}, {name: 'c2', image: 'busybox:1.26.1', command: ['sh', '-c', 'echo hello2']}]``` +|Additional containers expressed in YAML format to be applied to a pod. e.g. ```[{name: 'c1', image: 'busybox:1', command: ['sh', '-c', 'echo hello1'], volumeMounts: [{name: 'test-volume', mountPath: '/tmp', readOnly: true}]}, {name: 'c2', image: 'busybox:1.26.1', command: ['sh', '-c', 'echo hello2']}]``` | |=== @@ -308,7 +453,7 @@ spring: accounts: dev: namespace: devNamespace - imagePullPolicy: Always + imagePullPolicy: IfNotPresent entryPointStyle: exec limits: cpu: 4 @@ -340,7 +485,7 @@ The Spring Cloud Data Flow server for Kubernetes uses the https://github.com/fab You can pass configuration properties to the Data Flow Server by using Kubernetes https://kubernetes.io/docs/tasks/configure-pod-container/configmap/[ConfigMap] and https://kubernetes.io/docs/concepts/configuration/secret/[secrets]. -The following example shows one possible configuration, which enables MySQL and sets a memory limit: +The following example shows one possible configuration, which enables MariaDB and sets a memory limit: [source,yaml] @@ -364,17 +509,17 @@ data: limits: memory: 1024Mi datasource: - url: jdbc:mysql://${MYSQL_SERVICE_HOST}:${MYSQL_SERVICE_PORT}/mysql + url: jdbc:mariadb://${MARIADB_SERVICE_HOST}:${MARIADB_SERVICE_PORT}/database username: root - password: ${mysql-root-password} + password: ${database-password} driverClassName: org.mariadb.jdbc.Driver testOnBorrow: true validationQuery: "SELECT 1" ---- -The preceding example assumes that MySQL is deployed with `mysql` as the service name. Kubernetes publishes the host and port values of these services as environment variables that we can use when configuring the apps we deploy. +The preceding example assumes that MariaDB is deployed with `mariadb` as the service name. Kubernetes publishes the host and port values of these services as environment variables that we can use when configuring the apps we deploy. -We prefer to provide the MySQL connection password in a Secrets file, as the following example shows: +We prefer to provide the MariaDB connection password in a Secrets file, as the following example shows: [source,yaml] @@ -382,25 +527,24 @@ We prefer to provide the MySQL connection password in a Secrets file, as the fol apiVersion: v1 kind: Secret metadata: - name: mysql + name: mariadb labels: - app: mysql + app: mariadb data: - mysql-root-password: eW91cnBhc3N3b3Jk + database-password: eW91cnBhc3N3b3Jk ---- The password is a base64-encoded value. [[configuration-kubernetes-rdbms]] -=== Database Configuration +=== Database -Spring Cloud Data Flow provides schemas for H2, HSQLDB, MySQL, Oracle, PostgreSQL, DB2, and SQL Server. 
The appropriate schema is automatically created when the server starts, provided the right database driver and appropriate credentials are in the classpath. +include::configuration-database.adoc[] -The JDBC drivers for MySQL (via MariaDB driver), HSQLDB, PostgreSQL, and embedded H2 are available out of the box. -If you use any other database, you need to put the corresponding JDBC driver jar on the classpath of the server. +==== Database configuration -For instance, if you use MySQL in addition to a password in the secrets file, you could provide the following properties in the ConfigMap: +When running in Kubernetes, the database properties are typically set in the ConfigMap. For instance, if you use MariaDB, in addition to a password in the secrets file, you could provide the following properties in the ConfigMap: + [source,yaml] ---- @@ -409,17 +553,13 @@ data: application.yaml: |- spring: datasource: - url: jdbc:mysql://${MYSQL_SERVICE_HOST}:${MYSQL_SERVICE_PORT}/mysql + url: jdbc:mariadb://${MARIADB_SERVICE_HOST}:${MARIADB_SERVICE_PORT}/database username: root - password: ${mysql-root-password} - driverClassName: org.mariadb.jdbc.Driver - url: jdbc:mysql://${MYSQL_SERVICE_HOST}:${MYSQL_SERVICE_PORT}/test + password: ${database-password} driverClassName: org.mariadb.jdbc.Driver ---- - -For PostgreSQL, you could use the following configuration: - +Similarly, for PostgreSQL you could use the following configuration: [source,yaml] ---- @@ -433,21 +573,6 @@ data: ... driverClassName: org.postgresql.Driver ---- - -For HSQLDB, you could use the following configuration: - - [source,yaml] ----- -data: - application.yaml: |- - spring: - datasource: - url: jdbc:hsqldb:hsql://${HSQLDB_SERVICE_HOST}:${HSQLDB_SERVICE_PORT}/database - username: sa - driverClassName: org.hsqldb.jdbc.JDBCDriver ----- - The following YAML snippet from a Deployment shows how to mount a ConfigMap as `application.yaml` under `/config`, where Spring Boot processes it, as well as a Secret mounted under `/etc/secrets`, where it is picked up by the spring-cloud-kubernetes library because the environment variable `SPRING_CLOUD_KUBERNETES_SECRETS_PATHS` is set to `/etc/secrets`. [source,yaml] @@ -455,8 +580,8 @@ ... containers: - name: scdf-server - image: springcloud/spring-cloud-dataflow-server:2.5.0.BUILD-SNAPSHOT - imagePullPolicy: Always + image: springcloud/spring-cloud-dataflow-server:2.11.3-SNAPSHOT + imagePullPolicy: IfNotPresent volumeMounts: - name: config mountPath: /config @@ -475,8 +600,8 @@ The following YAML snippet from a Deployment is an example of mounting a ConfigM path: application.yaml - name: database secret: - secretName: mysql ----- + secretName: mariadb +---- You can find migration scripts for specific database types in the https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration[spring-cloud-task] repo. @@ -496,12 +621,12 @@ kubectl get all,cm,secrets,pvc ---- -You can list all resources used by a specific application or service by using a label to select resources.
The following command lists all resources used by the `mariadb` service: [source,shell] ---- -kubectl get all -l app=mysql +kubectl get all -l app=mariadb ---- @@ -669,15 +794,15 @@ If upgrading from a previous version of SCDF be sure to verify that `spring.data default: secretKeyRefs: - envVarName: "spring.datasource.password" - secretName: mysql - dataKey: mysql-root-password + secretName: mariadb + dataKey: database-password - envVarName: "spring.datasource.username" - secretName: mysql - dataKey: mysql-root-username + secretName: mariadb + dataKey: database-username ... ---- -Also verify that the associated secret(dataKey) is also available in secrets. SCDF provides an example of this for MySql here: `src/kubernetes/mysql/mysql-svc.yaml`. +Also verify that the associated secret (`dataKey`) is available in Secrets. SCDF provides an example of this for MariaDB here: `src/kubernetes/mariadb/mariadb-svc.yaml`. NOTE: Passing DB credentials via properties is the default in order to preserve backwards compatibility. This feature will be removed in a future release. @@ -688,7 +813,6 @@ This section covers customization of how scheduled tasks are configured. Schedul NOTE: Unless noted, properties set on a per-schedule basis always take precedence over properties set as the server configuration. This arrangement allows you to override global server-level properties for a specific schedule. -See https://github.com/spring-cloud/spring-cloud-scheduler-kubernetes/blob/master/src/main/java/org/springframework/cloud/scheduler/spi/kubernetes/KubernetesSchedulerProperties.java[`KubernetesSchedulerProperties`] for more on the supported options. ==== Entry Point Style @@ -715,7 +839,7 @@ You can also configure the Entry Point Style at the server level in the containe [source] ---- env: -- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_ENTRY_POINT_STYLE +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_ENTRY_POINT_STYLE value: entryPointStyle ---- @@ -726,6 +850,44 @@ You should choose an Entry Point Style of either `exec` or `shell`, to correspon Using the `boot` Entry Point Style corresponds to using the `exec` style `ENTRYPOINT`. Command line arguments from the deployment request are passed to the container, with the addition of application properties mapped into the `SPRING_APPLICATION_JSON` environment variable rather than command line arguments. +===== ttlSecondsAfterFinished + +When scheduling an application, you can clean up finished Jobs (either Complete or Failed) automatically by specifying a `ttlSecondsAfterFinished` value. + +The following example shows how you can configure it for scheduled application jobs: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.cron.ttlSecondsAfterFinished=86400 +---- +==== + +The following example shows how you can individually configure application jobs: + +==== +[source,options=nowrap] +---- +deployer.<app>.kubernetes.ttlSecondsAfterFinished=86400 +---- +==== + +Replace `<app>` with the name of your application and the `ttlSecondsAfterFinished` attribute with the appropriate value for cleaning up finished Jobs. + +You can configure the `ttlSecondsAfterFinished` at the global server level as well.
+ +The following example shows how to do so for tasks, by setting `ttlSecondsAfterFinished` in the container `env` section of a deployment YAML: + +[source] +---- +env: +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_TTL_SECONDS_AFTER_FINISHED + value: "86400" +---- + + ==== Environment Variables To influence the environment settings for a given application, you can take advantage of the `spring.cloud.deployer.kubernetes.environmentVariables` property. @@ -748,7 +910,7 @@ NOTE: When specifying environment variables in the server configuration and on a [source] ---- env: -- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_ENVIRONMENT_VARIABLES +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_ENVIRONMENT_VARIABLES value: myVar=myVal ---- @@ -768,7 +930,7 @@ The following example shows how you can individually configure containers: [source,options=nowrap] ---- -deployer.kubernetes.imagePullPolicy=Always +deployer.kubernetes.imagePullPolicy=IfNotPresent ---- @@ -780,7 +942,7 @@ You can configure an image pull policy at the server level in the container `env [source] ---- env: -- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_IMAGE_PULL_POLICY +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_IMAGE_PULL_POLICY value: Always ---- @@ -808,7 +970,7 @@ You can also configure the image pull secret at the server level in the containe [source] ---- env: -- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_IMAGE_PULL_SECRET +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_IMAGE_PULL_SECRET value: mysecret ---- @@ -823,7 +985,7 @@ By default the namespace used for scheduled tasks is `default`. This value can b [source] ---- env: -- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_NAMESPACE +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_NAMESPACE value: mynamespace ---- @@ -857,7 +1019,7 @@ You can also configure the service account name at the server level in the conta [source] ---- env: -- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_TASK_SERVICE_ACCOUNT_NAME +- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_TASK_SERVICE_ACCOUNT_NAME value: myserviceaccountname ---- diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc index d315fbdc4b..8f14a22c81 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc @@ -26,181 +26,47 @@ By default, stream (requires Skipper), and tasks are enabled and Task Scheduler The REST `/about` endpoint provides information on the features that have been enabled and disabled. -[[configuration-local-rdbms]] -=== Database - -A relational database is used to store stream and task definitions as well as the state of executed tasks. -Spring Cloud Data Flow provides schemas for *H2*, *MySQL*, *Oracle*, *PostgreSQL*, *Db2*, and *SQL Server*. The schema is automatically created when the server starts. - -By default, Spring Cloud Data Flow offers an embedded instance of the *H2* database. The *H2* database is good -for development purposes but is not recommended for production use. - -NOTE: *H2* database is not supported as an external mode. +[[configuration-local-java-home]] +=== Java Home -The JDBC drivers for *MySQL* (through the MariaDB driver), *PostgreSQL*, *SQL Server*, and embedded *H2* are available without additional configuration. -If you are using any other database, then you need to put the corresponding JDBC driver jar on the classpath of the server.
+When launching Spring Cloud Data Flow or the Skipper Server, the server may need to know where the Java 17 home is located in order to successfully launch Spring Boot 3 applications. -The database properties can be passed as environment variables or command-line arguments to the Data Flow Server. +By passing the following properties, you can provide the paths: -==== MySQL - -The following example shows how to define a MySQL database connection using MariaDB driver. - -[source,bash,subs=attributes] ----- +[source,shell] +.... java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:mysql://localhost:3306/mydb \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver ----- - -MySQL versions up to _5.7_ can be used with a MariaDB driver. Starting from version _8.0_ MySQL's own driver has to be used. - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:mysql://localhost:3306/mydb \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=com.mysql.jdbc.Driver ----- - -NOTE: Due to licensing restrictions we're unable to bundle MySQL driver. You need to add it to - server's classpath yourself. - -==== MariaDB + --spring.cloud.dataflow.defaults.boot3.local.javaHomePath=/usr/lib/jvm/java-17 \ + --spring.cloud.dataflow.defaults.boot2.local.javaHomePath=/usr/lib/jvm/java-1.8 +.... -The following example shows how to define a MariaDB database connection with command Line arguments - [source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:mariadb://localhost:3306/mydb?useMysqlMetadata=true \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver ----- +[[configuration-local-rdbms]] +=== Database -Starting with MariaDB v2.4.1 connector release, it is required to also add `useMysqlMetadata=true` -to the JDBC URL. This is a required workaround until when MySQL and MariaDB entirely switch as two -different databases. +include::configuration-database.adoc[] -MariaDB version _10.3_ introduced a support for real database sequences which is yet another breaking -change while toolings around these databases fully support MySQL and MariaDB as a separate database -types. Workaround is to use older hibernate dialect which doesn't try to use sequences. +==== Database configuration +When running locally, the database properties can be passed as environment variables or command-line arguments to the Data Flow Server.
For example, to start the server with MariaDB using command line arguments, execute the following command: [source,bash,subs=attributes] ---- java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:mariadb://localhost:3306/mydb?useMysqlMetadata=true \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect \ + --spring.datasource.url=jdbc:mariadb://localhost:3306/mydb \ + --spring.datasource.username=user \ + --spring.datasource.password=pass \ --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver ---- - -==== PostgreSQL - -The following example shows how to define a PostgreSQL database connection with command line arguments: - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:postgresql://localhost:5432/mydb \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=org.postgresql.Driver ----- - -==== SQL Server - -The following example shows how to define a SQL Server database connection with command line arguments: - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url='jdbc:sqlserver://localhost:1433;databaseName=mydb' \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver ----- - -==== Db2 - -The following example shows how to define a Db2 database connection with command line arguments: - +Likewise, to start the server with MariaDB using environment variables, execute the following command: [source,bash,subs=attributes] ---- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:db2://localhost:50000/mydb \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=com.ibm.db2.jcc.DB2Driver ----- - -NOTE: Due to licensing restrictions we're unable to bundle Db2 driver. You need to add it to - server's classpath yourself. - -==== Oracle - -The following example shows how to define a Oracle database connection with command line arguments: - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:oracle:thin:@localhost:1521/MYDB \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=oracle.jdbc.OracleDriver ----- - -NOTE: Due to licensing restrictions we're unable to bundle Oracle driver. You need to add it to - server's classpath yourself. - -==== Adding a Custom JDBC Driver -To add a custom driver for the database (for example, Oracle), you should rebuild the Data Flow Server and add the dependency to the Maven `pom.xml` file. -You need to modify the maven `pom.xml` of `spring-cloud-dataflow-server` module. -There are GA release tags in GitHub repository, so you can switch to desired GA tags to add the drivers on the production-ready codebase. - -To add a custom JDBC driver dependency for the Spring Cloud Data Flow server: - -. Select the tag that corresponds to the version of the server you want to rebuild and clone the github repository. -. 
Edit the spring-cloud-dataflow-server/pom.xml and, in the `dependencies` section, add the dependency for the database driver required. In the following example , an Oracle driver has been chosen: - -[source, xml] ----- - -... - - com.oracle.jdbc - ojdbc8 - 12.2.0.1 - -... - ----- - -[start=3] -. Build the application as described in <> - -You can also provide default values when rebuilding the server by adding the necessary properties to the dataflow-server.yml file, -as shown in the following example for PostgreSQL: - -[source] ----- -spring: - datasource: - url: jdbc:postgresql://localhost:5432/mydb - username: myuser - password: mypass - driver-class-name:org.postgresql.Driver +SPRING_DATASOURCE_URL=jdbc:mariadb://localhost:3306/mydb +SPRING_DATASOURCE_USERNAME=user +SPRING_DATASOURCE_PASSWORD=pass +SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver +SPRING_JPA_DATABASE_PLATFORM=org.hibernate.dialect.MariaDB106Dialect +java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar ---- -[start=4] -. Alternatively, you can build a custom Spring Cloud Data Flow server with your build files. -There are examples of a custom server builds in our https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/master/custom-dataflow-builds[samples repo] if there is a need to add a driver jars. - [[configuration-local-rdbms-schema]] ==== Schema Handling By default, the database schema is managed with _Flyway_, which is convenient if it's @@ -221,9 +87,6 @@ Here's a description of what happens when _Dataflow_ server is started: may be in place if a shared DB is used. * If the schema is empty, Flyway assumes it is starting from scratch. * Goes through all needed schema migrations. -* Due to historical reasons, if we detect that schema is from _1.7.x_ line - we convert these to structures needed from _2.0.x_ onwards and fully - continue with flyway. [NOTE] ==== @@ -260,6 +123,10 @@ These properties are also used when configuring < +|javaHomePath.<bootVersion> +| Path to JDK installation for launching applications depending on their registered Boot version. `bootVersion` should be `2` or `3`. +| System property `java.home` + |shutdownTimeout |Max number of seconds to wait for app shutdown. |30 @@ -302,18 +169,17 @@ The logging configuration is located on the classpath contained in a file named By default, the log file is configured to use: -``` - - -``` +[source,xml] +---- + +---- with the logback configuration for the `RollingPolicy`: - +[source,xml] ---- - - ${LOG_FILE}.log + ${LOG_FILE} @@ -326,15 +192,15 @@ with the logback configuration for the `RollingPolicy`: ${FILE_LOG_PATTERN} - ---- To check the `java.io.tmpdir` for the current Spring Cloud Data Flow Server `local` server, run: -``` +[source,shell] +---- jinfo <pid> | grep "java.io.tmpdir" -``` +---- If you want to change or override any of the properties `LOG_FILE`, `LOG_PATH`, `LOG_TEMP`, `LOG_FILE_MAX_SIZE`, `LOG_FILE_MAX_HISTORY` and `LOG_FILE_TOTAL_SIZE_CAP`, please set them as system properties. @@ -347,7 +213,7 @@ Data Flow Server delegates to the Skipper server the management of the Stream's $ java -jar spring-cloud-dataflow-server-{project-version}.jar --spring.cloud.skipper.client.serverUri=https://192.51.100.1:7577/api ---- -The configuration of show streams are deployed and to which platforms, is done by configuration of `platform accounts` on the Skipper server. +The configuration of how streams are deployed, and to which platforms, is done by configuring `platform accounts` on the Skipper server.
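+
+A platform account is a named entry under Skipper's `spring.cloud.skipper.server.platform` configuration. A minimal sketch for a single `local` account (the account name and property values are illustrative; `shutdownTimeout` and `javaOpts` are local deployer properties):
+
+[source,yaml]
+----
+spring:
+  cloud:
+    skipper:
+      server:
+        platform:
+          local:
+            accounts:
+              localDev:
+                shutdownTimeout: 60
+                javaOpts: "-Dtest=foo"
+----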
See the documentation on https://docs.spring.io/spring-cloud-skipper/docs/current/reference/htmlsingle/#platforms[platforms] for more information. @@ -359,7 +225,7 @@ For Tasks which are Spring Batch Jobs, the job and step execution data is also s As with streams launched by Skipper, Tasks can be launched to multiple platforms. If no platform is defined, a platform named `default` is created using the default values of the class https://github.com/spring-cloud/spring-cloud-deployer-local/blob/master/spring-cloud-deployer-local/src/main/java/org/springframework/cloud/deployer/spi/local/LocalDeployerProperties.java[LocalDeployerProperties], which is summarized in the table <> -To configure new platform accounts for the local platform, provide an entry under the `spring.cloud.dataflow.task.platform.local` section in your `application.yaml` file for via another Spring Boot supported mechanism. +To configure new platform accounts for the local platform, provide an entry under the `spring.cloud.dataflow.task.platform.local` section in your `application.yaml` file or via another Spring Boot supported mechanism. In the following example, two local platform accounts named `localDev` and `localDevDebug` are created. The keys such as `shutdownTimeout` and `javaOpts` are local deployer properties. @@ -390,139 +256,12 @@ You can configure the Data Flow server that is running locally to deploy tasks t Detailed examples for launching and scheduling tasks across multiple platforms, are available in this section https://dataflow.spring.io/docs/recipes/multi-platform-deployment/[Multiple Platform Support for Tasks] on http://dataflow.spring.io. -===== Start Skipper - -[source,bash] ----- -git clone https://github.com/spring-cloud/spring-cloud-skipper.git -cd spring-cloud/spring-cloud-skipper -./mvnw clean package -DskipTests=true -java -jar spring-cloud-skipper-server/target/spring-cloud-skipper-server-2.2.0.BUILD-SNAPSHOT.jar ----- - -===== Start Spring Cloud Data Flow - -[source,bash] ----- -git clone https://github.com/spring-cloud/spring-cloud-dataflow.git -cd spring-cloud-dataflow -./mvnw clean package -DskipTests=true -cd .. ----- - -Create a yaml file scdf.yml with the following contents: - -[source,yaml] ----- -spring: - cloud: - dataflow: - security: - authorization: - provider-role-mappings: - uaa: - map-oauth-scopes: true - role-mappings: - ROLE_CREATE: foo.create - ROLE_DEPLOY: foo.create - ROLE_DESTROY: foo.create - ROLE_MANAGE: foo.create - ROLE_MODIFY: foo.create - ROLE_SCHEDULE: foo.create - ROLE_VIEW: foo.view - security: - oauth2: - client: - registration: - uaa: - redirect-uri: '{baseUrl}/login/oauth2/code/{registrationId}' - authorization-grant-type: authorization_code - client-id: dataflow - client-secret: dataflow - scope: <1> - - openid - - foo.create - - foo.view - provider: - uaa: - jwk-set-uri: http://uaa:8080/uaa/token_keys - token-uri: http://uaa:8080/uaa/oauth/token - user-info-uri: http://uaa:8080/uaa/userinfo <2> - user-name-attribute: user_name - authorization-uri: http://uaa:8080/uaa/oauth/authorize - resourceserver: - opaquetoken: <3> - introspection-uri: http://uaa:8080/uaa/introspect - client-id: dataflow - client-secret: dataflow ----- - -<1> If you use scopes to identify roles, please make sure to also request - the relevant scopes, e.g `dataflow.view`, `dataflow.create` and don't forget to request the `openid` scope -<2> Used to retrieve profile information, e.g. 
username for display purposes (mandatory) -<3> Used for token introspection and validation (mandatory) - -The `introspection-uri` property is especially important when passing an externally retrieved (opaque) -OAuth Access Token to Spring Cloud Data Flow. In that case Spring Cloud Data Flow will take the OAuth Access, -and use the UAA's https://docs.cloudfoundry.org/api/uaa/version/74.4.0/index.html#introspect-token[Introspect Token Endpoint] -to not only check the validity of the token but also retrieve the associated OAuth scopes from the UAA - -Finally startup Spring Cloud Data Flow: - -[source,bash] ----- -java -jar spring-cloud-dataflow/spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-2.4.0.BUILD-SNAPSHOT.jar --spring.config.additional-location=scdf.yml ----- - -[[configuration-security-role-mapping]] -===== Role Mappings - -By default all roles are assigned to users that login to Spring Cloud Data Flow. -However, you can set the property: - -`spring.cloud.dataflow.security.authorization.provider-role-mappings.uaa.map-oauth-scopes: true` +[[configuration-local-security]] +=== Security Configuration -This will instruct the underlying `DefaultAuthoritiesExtractor` to map -OAuth scopes to the respective authorities. The following scopes are supported: - -* Scope `dataflow.create` maps to the `CREATE` role -* Scope `dataflow.deploy` maps to the `DEPLOY` role -* Scope `dataflow.destroy` maps to the `DESTROY` role -* Scope `dataflow.manage` maps to the `MANAGE` role -* Scope `dataflow.modify` maps to the `MODIFY` role -* Scope `dataflow.schedule` maps to the `SCHEDULE` role -* Scope `dataflow.view` maps to the `VIEW` role - -Additionally you can also map arbitrary scopes to each of the Data Flow roles: - -[source,yaml] ----- -spring: - cloud: - dataflow: - security: - authorization: - provider-role-mappings: - uaa: - map-oauth-scopes: true # <1> - role-mappings: - ROLE_CREATE: dataflow.create # <2> - ROLE_DEPLOY: dataflow.deploy - ROLE_DESTROY: dataflow.destoy - ROLE_MANAGE: dataflow.manage - ROLE_MODIFY: dataflow.modify - ROLE_SCHEDULE: dataflow.schedule - ROLE_VIEW: dataflow.view ----- - -<1> Enables explicit mapping support from OAuth scopes to Data Flow roles -<2> When role mapping support is enabled, you must provide a mapping for -all 7 Spring Cloud Data Flow roles *ROLE_CREATE*, *ROLE_DEPLOY*, *ROLE_DESTROY*, *ROLE_MANAGE*, *ROLE_MODIFY*, *ROLE_SCHEDULE*, *ROLE_VIEW*. - -[TIP] -==== -You can assign an OAuth scope to multiple Spring Cloud Data Flow roles, giving you flexible regarding the granularity of your authorization configuration. -==== +[[configuration-local-security-cloudfoundry-uaa]] +==== CloudFoundry User Account and Authentication (UAA) Server +See the <> configuration section for details how to configure for local testing and development. [[configuration-security-ldap-authentication]] ==== LDAP Authentication @@ -588,62 +327,17 @@ uaac user get ---- ==== -[[configuration-security-ldap-uaa-example]] -===== LDAP Security and UAA Example Application - -In order to get up and running quickly and to help you understand the security architecture, we -provide the https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/master/security-ldap-uaa-example[LDAP Security and UAA Example] -on GitHub. - -[IMPORTANT] -==== -This is solely a demo/example application and shall not be used in production. 
-==== - -The setup consists of: - -* Spring Cloud Data Flow Server -* Skipper Server -* CloudFoundry User Account and Authentication (UAA) Server -* Lightweight Directory Access Protocol (LDAP) Server (provided by https://directory.apache.org/[Apache Directory Server] (ApacheDS)) - -Ultimately, as part of this example, you will learn how to configure and launch -a Composed Task using this security setup. - [[configuration-security-spring-security-oauth2-example]] ==== Spring Security OAuth2 Resource/Authorization Server Sample For local testing and development, you may also use the Resource and Authorization Server support provided by -https://projects.spring.io/spring-security-oauth/[Spring Security OAuth]. It -allows you to easily create your own (very basic) OAuth2 Server with the following simple annotations: - -* `@EnableResourceServer` -* `@EnableAuthorizationServer` +https://spring.io/projects/spring-security/[Spring Security]. It +allows you to easily create your own OAuth2 Server by configuring the SecurityFilterChain. -NOTE: In fact the UAA uses Spring Security OAuth2 under the covers, thus the basic endpoints -are the same. +Samples can be found at: +https://docs.spring.io/spring-security/reference/samples.html[Spring Security Samples] -A working example application can be found at: -https://github.com/ghillert/oauth-test-server/[https://github.com/ghillert/oauth-test-server/] - -Clone the project and configure Spring Cloud Data Flow with the respective Client ID and Client Secret: - -[source,yaml] ----- -security: - oauth2: - client: - client-id: myclient - client-secret: mysecret - access-token-uri: http://127.0.0.1:9999/oauth/token - user-authorization-uri: http://127.0.0.1:9999/oauth/authorize - resource: - user-info-uri: http://127.0.0.1:9999/me - token-info-uri: http://127.0.0.1:9999/oauth/check_token ----- - -IMPORTANT: This sample application is not intended for production use [[configuration-security-shell-authentication]] ==== Data Flow Shell Authentication @@ -658,7 +352,7 @@ $ java -jar spring-cloud-dataflow-shell-{project-version}.jar \ --dataflow.uri=http://localhost:9393 \ # <1> --dataflow.username=my_username \ # <2> --dataflow.password=my_password \ # <3> - --skip-ssl-validation true \ # <4> + --skip-ssl-validation \ # <4> ---- <1> Optional, defaults to http://localhost:9393. @@ -679,7 +373,7 @@ server-unknown:>dataflow config server \ --uri http://localhost:9393 \ # <1> --username myuser \ # <2> --password mysecret \ # <3> - --skip-ssl-validation true \ # <4> + --skip-ssl-validation \ # <4> ---- <1> Optional, defaults to http://localhost:9393. @@ -691,7 +385,7 @@ The following image shows a typical shell command to connect to and authenticate Flow Server: .Target and Authenticate with the Data Flow Server from within the Shell -image::{dataflow-asciidoc}/images/dataflow-security-shell-target.png[Target and Authenticate with the Data Flow Server from within the Shell, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-security-shell-target.png[Target and Authenticate with the Data Flow Server from within the Shell, scaledwidth="100%"] Once successfully targeted, you should see the following output: @@ -728,7 +422,7 @@ $ java -jar spring-cloud-dataflow-shell-{project-version}.jar \ ==== -=== About Configuration +=== About API Configuration The Spring Cloud Data Flow About Restful API result contains a display name, version, and, if specified, a URL for each of the major dependencies that comprise Spring Cloud Data Flow. 
The result (if enabled) also contains the @@ -736,34 +430,52 @@ sha1 and or sha256 checksum values for the shell dependency. The information that is returned for each of the dependencies is configurable by setting the following properties: -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.name: the -name to be used for the core. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.version: -the version to be used for the core. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.name: the -name to be used for the dashboard. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.version: -the version to be used for the dashboard. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.name: the -name to be used for the implementation. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.version: -the version to be used for the implementation. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.name: the -name to be used for the shell. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.version: -the version to be used for the shell. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.url: -the URL to be used for downloading the shell dependency. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1: the sha1 -checksum value that is returned with the shell dependency info. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256: -the sha256 checksum value that is returned with the shell dependency info. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1-url: -if the `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1` -is not specified, SCDF uses the contents of the file specified at this URL for the checksum. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256-url: -if the `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256` -is not specified, SCDF uses the contents of the file specified at this URL for the checksum. 
+[frame="none"] +[cols="6,4"] +|=== +|Property Name | Description + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.name# +|[.small]#Name to be used for the core# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.version# +|[.small]#Version to be used for the core# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.name# +|[.small]#Name to be used for the dashboard# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.version# +|[.small]#Version to be used for the dashboard# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.name# +|[.small]#Name to be used for the implementation# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.version# +|[.small]#Version to be used for the implementation# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.name# +|[.small]#Name to be used for the shell# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.version# +|[.small]#Version to be used for the shell# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.url# +|[.small]#URL to be used for downloading the shell dependency# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1# +|[.small]#Sha1 checksum value that is returned with the shell dependency info# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256# +|[.small]#Sha256 checksum value that is returned with the shell dependency info# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1-url# +|[.small]#if `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1` +is not specified, SCDF uses the contents of the file specified at this URL for the checksum# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256-url# +|[.small]#if the `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256` is not specified, SCDF uses the contents of the file specified at this URL for the checksum# +|=== + ==== Enabling Shell Checksum values By default, checksum values are not displayed for the shell dependency. If @@ -774,13 +486,21 @@ you need this feature enabled, set the There are reserved values (surrounded by curly braces) that you can insert into the URL that will make sure that the links are up to date: -* repository: if using a build-snapshot, milestone, or release candidate of +* `repository`: if using a build-snapshot, milestone, or release candidate of Data Flow, the repository refers to the repo-spring-io repository. Otherwise, it refers to Maven Central. -* version: Inserts the version of the jar/pom. +* `version`: Inserts the version of the jar/pom. 
For example, -`https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/\{version}/spring-cloud-dataflow-shell-\{version}.jar` + +[source] +---- +https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/{version}/spring-cloud-dataflow-shell-{version}.jar +---- produces -`https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/1.2.3.RELEASE/spring-cloud-dataflow-shell-1.2.3.RELEASE.jar` -if you were using the 1.2.3.RELEASE version of the Spring Cloud Data Flow Shell + +[source] +---- +https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/2.11.0/spring-cloud-dataflow-shell-2.11.0.jar +---- +if you were using the `2.11.0` version of the Spring Cloud Data Flow Shell. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc index 3dbd0f47ae..41a0ad34f6 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc @@ -2,8 +2,8 @@ = Configuration [[configuration-maven]] -== Maven - +== Maven Resources +Spring Cloud Data Flow supports referencing artifacts via Maven (`maven:`). If you want to override specific Maven configuration properties (remote repositories, proxies, and others) or run the Data Flow Server behind a proxy, you need to specify those properties as command-line arguments when you start the Data Flow Server, as shown in the following example: @@ -319,7 +319,7 @@ Do not forget to target the Data Flow Server with the following command: ==== [source,bash] ---- -dataflow:> dataflow config server https://localhost:8443/ +dataflow:> dataflow config server --uri https://localhost:8443/ ---- ==== @@ -431,9 +431,10 @@ access the REST API. To do so, retrieve an OAuth2 Access Token from your OAuth2 provider and pass that access token to the REST API by using the *Authorization* HTTP header, as follows: -``` +[source, shell] +---- $ curl -H "Authorization: Bearer <access-token>" http://localhost:9393/ -H 'Accept: application/json' -``` +---- [[configuration-security-customizing-authorization]] ==== Customizing Authorization @@ -454,7 +455,87 @@ setting the boolean property `map-oauth-scopes` for your provider to `true` (the For example, if your provider's ID is `uaa`, the property would be `spring.cloud.dataflow.security.authorization.provider-role-mappings.uaa.map-oauth-scopes`. -For more details, see the chapter on <>. +[[configuration-security-role-mapping]] +===== Role Mappings + +By default, all roles are assigned to users that log in to Spring Cloud Data Flow. +However, you can set the property: + +`spring.cloud.dataflow.security.authorization.provider-role-mappings.uaa.map-oauth-scopes: true` + +This will instruct the underlying `DefaultAuthoritiesExtractor` to map +OAuth scopes to the respective authorities.
+
+* Scope `dataflow.create` maps to the `CREATE` role
+* Scope `dataflow.deploy` maps to the `DEPLOY` role
+* Scope `dataflow.destroy` maps to the `DESTROY` role
+* Scope `dataflow.manage` maps to the `MANAGE` role
+* Scope `dataflow.modify` maps to the `MODIFY` role
+* Scope `dataflow.schedule` maps to the `SCHEDULE` role
+* Scope `dataflow.view` maps to the `VIEW` role
+
+Additionally, you can map arbitrary scopes to each of the Data Flow roles:
+
+[source,yaml]
+----
+spring:
+  cloud:
+    dataflow:
+      security:
+        authorization:
+          provider-role-mappings:
+            uaa:
+              map-oauth-scopes: true # <1>
+              role-mappings:
+                ROLE_CREATE: dataflow.create # <2>
+                ROLE_DEPLOY: dataflow.deploy
+                ROLE_DESTROY: dataflow.destroy
+                ROLE_MANAGE: dataflow.manage
+                ROLE_MODIFY: dataflow.modify
+                ROLE_SCHEDULE: dataflow.schedule
+                ROLE_VIEW: dataflow.view
+----
+
+<1> Enables explicit mapping support from OAuth scopes to Data Flow roles
+<2> When role mapping support is enabled, you must provide a mapping for
+all 7 Spring Cloud Data Flow roles: *ROLE_CREATE*, *ROLE_DEPLOY*, *ROLE_DESTROY*, *ROLE_MANAGE*, *ROLE_MODIFY*, *ROLE_SCHEDULE*, *ROLE_VIEW*.
+
+[TIP]
+====
+You can assign an OAuth scope to multiple Spring Cloud Data Flow roles, giving you flexibility regarding the granularity of your authorization configuration.
+====
+
+[[configuration-security-group-mapping]]
+===== Group Mappings
+
+Mapping roles from scopes has its own drawbacks, as it may not always be possible
+to change the scopes in a given identity provider. If the identity provider can
+include group claims in the tokens it returns, these claims can also be used to
+map to server roles.
+
+====
+[source,yaml]
+----
+spring:
+  cloud:
+    dataflow:
+      security:
+        authorization:
+          provider-role-mappings:
+            uaa:
+              map-oauth-scopes: false
+              map-group-claims: true
+              group-claim: roles
+              group-mappings:
+                ROLE_CREATE: my-group-id
+                ROLE_DEPLOY: my-group-id
+                ROLE_DESTROY: my-group-id
+                ROLE_MANAGE: my-group-id
+                ROLE_MODIFY: my-group-id
+                ROLE_SCHEDULE: my-group-id
+                ROLE_VIEW: my-group-id
+----
+====

You can also customize the role-mapping behavior by providing your own Spring bean definition that extends Spring Cloud Data Flow's `AuthorityMapper` interface. In that case,
@@ -579,6 +660,8 @@ spring:
- POST /tasks/executions/* => hasRole('ROLE_DEPLOY')
- DELETE /tasks/executions/* => hasRole('ROLE_DESTROY')

+ - GET /tasks/thinexecutions => hasRole('ROLE_VIEW')
+
# Task Schedules
- GET /tasks/schedules => hasRole('ROLE_VIEW')

@@ -647,7 +730,7 @@ which the user is not authorized.

===== Securing the Spring Boot Management Endpoints

When security is enabled, the
-{spring-boot-docs-reference}/html/production-ready-monitoring.html[Spring Boot HTTP Management Endpoints]
+{spring-boot-docs}/#actuator.monitoring[Spring Boot HTTP Management Endpoints]
are secured in the same way as the other REST endpoints. The management REST endpoints
are available under `/management` and require the `MANAGEMENT` role.
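+
+For example, assuming the server runs on the default port and you have already
+obtained an access token that carries the `MANAGEMENT` role (the token value below
+is a placeholder, and the `info` endpoint is assumed to be exposed in your
+configuration), a management endpoint could be queried as follows:
+
+[source,shell]
+----
+$ curl -H "Authorization: Bearer <ACCESS_TOKEN>" http://localhost:9393/management/info
+----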
@@ -818,3 +901,4 @@ By using the `token_format` parameter, you can request the token to be either: include::configuration-local.adoc[] include::configuration-cloudfoundry.adoc[] include::configuration-kubernetes.adoc[] +include::configuration-carvel.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc index 0f58b77e6e..cdfcc705d6 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc @@ -33,7 +33,7 @@ NOTE: The default Dashboard server port is `9393`. The following image shows the opening page of the Spring Cloud Data Flow dashboard: .The Spring Cloud Data Flow Dashboard -image::{dataflow-asciidoc}/images/dataflow-dashboard-about.png[The Spring Cloud Data Flow Dashboard, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-dashboard-about.png[The Spring Cloud Data Flow Dashboard, scaledwidth="100%"] @@ -46,7 +46,7 @@ You can import a number of applications at once by using the Bulk Import Applica The following image shows a typical list of available applications within the dashboard: .List of Available Applications -image::{dataflow-asciidoc}/images/dataflow-available-apps-list.png[List of available applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-available-apps-list.png[List of available applications, scaledwidth="100%"] @@ -79,7 +79,7 @@ After setting your definitions through one of these routes, click *Import Applic The following image shows an example page of one way to bulk import applications: .Bulk Import Applications -image::{dataflow-asciidoc}/images/dataflow-bulk-import-applications.png[Bulk Import Applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-bulk-import-applications.png[Bulk Import Applications, scaledwidth="100%"] @@ -93,7 +93,7 @@ A list of the used deployment properties is available by clicking on the applica The following image shows an example of the *Runtime* tab in use: .List of Running Applications -image::{dataflow-asciidoc}/images/dataflow-runtime.png[List of running applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-runtime.png[List of running applications, scaledwidth="100%"] @@ -121,13 +121,13 @@ Hovering over the boxes in the visual representation shows more details about th In the following screenshot, the `timer` stream has been expanded to show the visual representation: .List of Stream Definitions -image::{dataflow-asciidoc}/images/dataflow-streams-list-definitions.png[List of Stream Definitions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-list-definitions.png[List of Stream Definitions, scaledwidth="100%"] If you click the details button, the view changes to show a visual representation of that stream and any related streams. 
In the preceding example, if you click details for the `timer` stream, the view changes to the following view, which clearly shows the relationship between the three streams (two of them are tapping into the `timer` stream):

.Stream Details Page
-image::{dataflow-asciidoc}/images/dataflow-stream-details.png[Stream Details Page, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-stream-details.png[Stream Details Page, scaledwidth="100%"]

@@ -148,7 +148,7 @@ The Spring Flo https://github.com/spring-projects/spring-flo/wiki[wiki] includes

The following image shows the Flo designer in use:

.Flo for Spring Cloud Data Flow
-image::{dataflow-asciidoc}/images/dataflow-flo-create-stream.png[Flo for Spring Cloud Data Flo, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-flo-create-stream.png[Flo for Spring Cloud Data Flow, scaledwidth="100%"]

@@ -168,17 +168,17 @@ You can switch between both views.

TIP: The form builder offers stronger validation of the inputs.

.The following image shows the form builder
-image::{dataflow-asciidoc}/images/dataflow-stream-deploy-builder.png[Form builder, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-stream-deploy-builder.png[Form builder, scaledwidth="100%"]

.The following image shows the same properties in the free text
-image::{dataflow-asciidoc}/images/dataflow-stream-deploy-freetext.png[Free text, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-stream-deploy-freetext.png[Free text, scaledwidth="100%"]

[[dashboard-stream-logs]]
=== Accessing Stream Logs

Once the stream applications are deployed, their logs can be accessed from the Stream `summary` page, as the following image shows:

-image::{dataflow-asciidoc}/images/dataflow-stream-logs.png[Stream Logs, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-stream-logs.png[Stream Logs, scaledwidth="100%"]

[[dashboard-flo-streams-designer-fanin-fanout]]
=== Creating Fan-In and Fan-Out Streams

@@ -187,7 +187,7 @@ In the <> chapter, you can learn

The UI provides dedicated support for named destinations as well:

.Flo for Spring Cloud Data Flow
-image::{dataflow-asciidoc}/images/dataflow-flo-create-stream-fanin-fanout.png[Fan-in and Fan-out example, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-flo-create-stream-fanin-fanout.png[Fan-in and Fan-out example, scaledwidth="100%"]

In this example, we have data from an _HTTP Source_ and a _JDBC Source_ that is being sent to the
_sharedData_ channel, which represents a fan-in use case.

@@ -202,7 +202,7 @@ To create the tap stream, connect the output connector of the _HTTP Source_ to t
The connection is displayed as a dotted line, indicating that you created a tap stream.

.Creating a Tap Stream
-image::{dataflow-asciidoc}/images/dataflow-flo-create-tap-stream.png[Tap stream example, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-flo-create-tap-stream.png[Tap stream example, scaledwidth="100%"]

If you did not provide a name for the stream, the primary stream (_HTTP Source_ to _File Sink_) is automatically named.
When creating tap streams, the primary stream must always be explicitly named.
@@ -211,7 +211,7 @@ In the preceding image, the primary stream was named _HTTP_INGEST_.

By using the Dashboard, you can also switch the primary stream so that it becomes the secondary tap stream.
.Change Primary Stream to Secondary Tap Stream -image::{dataflow-asciidoc}/images/dataflow-flo-tap-stream-switch-to-primary-stream.png[Switch tap stream to primary stream, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-tap-stream-switch-to-primary-stream.png[Switch tap stream to primary stream, scaledwidth="100%"] Hover over the existing primary stream, the line between _HTTP Source_ and _File Sink_. Several control icons appear, and, by clicking on the icon labeled _Switch to/from tap_, @@ -219,7 +219,7 @@ you change the primary stream into a tap stream. Do the same for the tap stream and switch it to a primary stream. .End Result of Switching the Primary Stream -image::{dataflow-asciidoc}/images/dataflow-flo-tap-stream-switch-to-primary-stream-result.png[End result of switching the tap stream to a primary stream, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-tap-stream-switch-to-primary-stream-result.png[End result of switching the tap stream to a primary stream, scaledwidth="100%"] TIP: When interacting directly with <>, @@ -233,17 +233,17 @@ The *Import/Export* tab of the Dashboard includes a page that provides the optio The following image shows the streams export page: .Stream Utils Export page -image::{dataflow-asciidoc}/images/dataflow-streams-utils-export.png[Stream Utils Export, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-utils-export.png[Stream Utils Export, scaledwidth="100%"] When importing the streams, you have to import from a valid JSON file. You can either manually draft the file or export the file from the streams export page. .Stream Utils Import page -image::{dataflow-asciidoc}/images/dataflow-streams-utils-import.png[Stream Utils Import, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-utils-import.png[Stream Utils Import, scaledwidth="100%"] After importing the file, you get confirmation of whether the operation completed successfully. .Stream Utils Import Result page -image::{dataflow-asciidoc}/images/dataflow-streams-utils-import-result.png[Stream Utils Import Result, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-utils-import-result.png[Stream Utils Import Result, scaledwidth="100%"] ifndef::omit-tasks-docs[] @@ -269,7 +269,7 @@ TIP: You can also use this tab to create Batch Jobs. 
The following image shows a typical list of task applications:

.List of Task Apps
-image::{dataflow-asciidoc}/images/dataflow-task-apps-list.png[List of Task Apps, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-task-apps-list.png[List of Task Apps, scaledwidth="100%"]

On this screen, you can perform the following actions:

@@ -288,13 +288,13 @@ This page lists the Data Flow task definitions and provides actions to launch or

The following image shows the Definitions page:

.List of Task Definitions
-image::{dataflow-asciidoc}/images/dataflow-task-definitions-list.png[List of Task Definitions, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-task-definitions-list.png[List of Task Definitions, scaledwidth="100%"]

==== Create a Task Definition

The following image shows a task definition composed of the timestamp application as well as the list of task applications that can be used to create a task definition:

-image::{dataflow-asciidoc}/images/dataflow-task-definition-create.png[List of Task Applications, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-task-definition-create.png[List of Task Applications, scaledwidth="100%"]

On this page, you can also specify various properties that are used during the deployment of the application.
Once you are satisfied with the task definition, you can click the *CREATE TASK* button. A dialog box then asks for a task definition name and description. At a minimum, you must provide a name for the new definition.

@@ -316,7 +316,7 @@ NOTE: Task parameters are not typed.

The following image shows the composed task designer:

.Composed Task Designer
-image::{dataflow-asciidoc}/images/dataflow-ctr-flo-tab.png[Composed Task Designer, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-ctr-flo-tab.png[Composed Task Designer, scaledwidth="100%"]

@@ -327,7 +327,7 @@ To do so, click the *Tasks* tab and select the task you want to launch by pressi

The following image shows the Task Launch page:

.Task Launch Page
-image::{dataflow-asciidoc}/images/dataflow-task-launch.png[Task Launch, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-task-launch.png[Task Launch, scaledwidth="100%"]

==== Import/Export Tasks

@@ -337,17 +337,17 @@ The *Import/Export* page provides the option to import and export tasks. This

The following image shows the tasks export page:

.Tasks Utils Export page
-image::{dataflow-asciidoc}/images/dataflow-tasks-utils-export.png[Tasks Utils Export, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-tasks-utils-export.png[Tasks Utils Export, scaledwidth="100%"]

Similarly, you can import task definitions. To do so, click the *Import/Export* option on the left side of the page. From here, click the *Import task(s): Import tasks from a JSON file* option to show the *Import Tasks* page. On the *Import Tasks* page, you have to import from a valid JSON file. You can either manually draft the file or export the file from the *Tasks Export* page.

.Tasks Utils Import page
-image::{dataflow-asciidoc}/images/dataflow-tasks-utils-import.png[Tasks Utils Import, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-tasks-utils-import.png[Tasks Utils Import, scaledwidth="100%"]

After importing the file, you get confirmation of whether the operation completed successfully.
.Tasks Utils Import Result page -image::{dataflow-asciidoc}/images/dataflow-tasks-utils-import-result.png[Tasks Utils Import Result, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-tasks-utils-import-result.png[Tasks Utils Import Result, scaledwidth="100%"] @@ -361,14 +361,14 @@ Finally, you can clean up one or more task executions. This operation removes an The following image shows the *Executions* tab: .List of Task Executions -image::{dataflow-asciidoc}/images/dataflow-task-executions-list.png[List of Task Executions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-executions-list.png[List of Task Executions, scaledwidth="100%"] [[dashboard-tasks-execution-detail]] === Execution Detail For each task execution on the *Task Executions* tab, you can retrieve detailed information about a specific execution by clicking the *Execution ID* of the task execution. -image::{dataflow-asciidoc}/images/dataflow-task-execution-detail.png[List of Task Executions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-execution-detail.png[List of Task Executions, scaledwidth="100%"] On this screen, you can view not only the information from the task executions page but also: @@ -391,7 +391,7 @@ Additionally, you can trigger the following operations: To submit a stop task execution request to the platform, click the drop down button next to the task execution that needs to be stopped. Now click the *Stop task* option. The dashboard presents a dialog box asking if you are sure that you want to stop the task execution. If so, click `Stop Task Execution(s)`. -image::{dataflow-asciidoc}/images/dataflow-task-execution-stop.png[Stop Executing Tasks, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-execution-stop.png[Stop Executing Tasks, scaledwidth="100%"] NOTE: Child Spring Cloud Task applications launched via Spring Batch applications that use remote partitioning are not stopped. @@ -417,7 +417,7 @@ NOTE: Clicking the stop button actually sends a stop request to the running job, The following image shows the *Jobs* tab: .List of Job Executions -image::{dataflow-asciidoc}/images/dataflow-job-executions-list.png[List of Job Executions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-job-executions-list.png[List of Job Executions, scaledwidth="100%"] @@ -429,7 +429,7 @@ After you have launched a batch job, the Job Execution Details page shows inform The following image shows the Job Execution Details page: .Job Execution Details -image::{dataflow-asciidoc}/images/dataflow-jobs-job-execution-details.png[Job Execution Details, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-jobs-job-execution-details.png[Job Execution Details, scaledwidth="100%"] The Job Execution Details page contains a list of the executed steps. You can further drill into the details of each step's execution by clicking the magnifying glass icon. @@ -444,7 +444,7 @@ The Step Execution Details page provides information about an individual step wi The following image shows the Step Execution Details page: .Step Execution Details -image::{dataflow-asciidoc}/images/dataflow-step-execution-history.png[Step Execution History, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-step-execution-history.png[Step Execution History, scaledwidth="100%"] The Step Execution Details screen provides a complete list of all Step Execution Context key-value pairs. 
@@ -459,7 +459,27 @@ When that happens, check the server log files for further details.

[[dashboard-job-executions-steps-progress]]
=== Step Execution History

-Under *Step Execution History*, you can also view various metrics associated with the selected step, such as duration, read counts, write counts, and others.
+Under *Step Execution History*, you can also view various metrics associated with the selected step, such as duration, read counts, write counts, and others across all of its executions.
+For each metric, there are five attributes:
+
+* Count - The number of step executions in which the metric could have participated. It is not a count of the number of times the event occurred during each step execution.
+* Min - The minimum value for the metric across all the executions for this step.
+* Max - The maximum value for the metric across all the executions for this step.
+* Mean - The mean value for the metric across all the executions for this step.
+* Standard Deviation - The standard deviation for the metric across all the executions for this step.
+
+The Step Execution History contains the following metrics:
+
+* Commit Count - The max, min, mean, and standard deviation for the number of commits of all the executions for the given step.
+* Duration - The max, min, mean, and standard deviation for the duration of all the executions for the given step.
+* Duration Per Read - The max, min, mean, and standard deviation for the duration per read of all the executions for the given step.
+* Filter Count - The max, min, mean, and standard deviation for the number of filters of all the executions for the given step.
+* Process Skip Count - The max, min, mean, and standard deviation for the process skips of all the executions for the given step.
+* Read Count - The max, min, mean, and standard deviation for the number of reads of all the executions for the given step.
+* Read Skip Count - The max, min, mean, and standard deviation for the number of read skips of all the executions for the given step.
+* Rollback Count - The max, min, mean, and standard deviation for the number of rollbacks of all the executions for the given step.
+* Write Count - The max, min, mean, and standard deviation for the number of writes of all the executions for the given step.
+* Write Skip Count - The max, min, mean, and standard deviation for the number of write skips of all the executions for the given step.
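+
+For example, if a given step has run three times with durations of 2s, 4s, and 6s
+(illustrative numbers only), the Duration metric would show a Count of 3, a Min of
+2s, a Max of 6s, and a Mean of 4s, with the Standard Deviation indicating how widely
+the individual durations are spread around that mean.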
endif::omit-tasks-docs[]

@@ -490,13 +510,13 @@ are recorded for:

The following image shows the Audit Records page:

.List Overview of Audit Records
-image::{dataflow-asciidoc}/images/dataflow-audit-records-list.png[List of available audit records, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-audit-records-list.png[List of available audit records, scaledwidth="100%"]

By clicking the _show details_ icon (the "`i`" in a circle on the right), you can obtain
further details regarding the audit record:

.List Details of an Audit Record
-image::{dataflow-asciidoc}/images/dataflow-audit-records-details.png[Details of a single audit record, scaledwidth="100%"]
+image::{dataflow-asciidoc-images}/dataflow-audit-records-details.png[Details of a single audit record, scaledwidth="100%"]

Generally, auditing provides the following information:

diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc
index db12c61e9b..38331658e5 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc
@@ -1,1088 +1,8 @@
[[getting-started-kubernetes]]
== Getting Started - Kubernetes

-https://cloud.spring.io/spring-cloud-dataflow/[Spring Cloud Data Flow] is a toolkit for building data integration and real-time data-processing pipelines.
-
-Pipelines consist of Spring Boot applications built with the Spring Cloud Stream or Spring Cloud Task microservice frameworks.
-This makes Spring Cloud Data Flow suitable for a range of data-processing use cases, from import-export to event streaming and predictive analytics.
-
-This project provides support for using Spring Cloud Data Flow with Kubernetes as the runtime for these pipelines, with applications packaged as Docker images.
-
-See the link:https://dataflow.spring.io/docs/installation/kubernetes/[Kubernetes] section of the microsite for more information on installing Spring Cloud Data Flow on Kubernetes.
+This section covers how to get started with Spring Cloud Data Flow running locally on Kubernetes. See xref:configuration-carvel[Deployment using Carvel] and xref:configuration-kubernetes[Configuration - Kubernetes] for more information on installing Spring Cloud Data Flow on Kubernetes.

Once you have the Data Flow server installed on Kubernetes, you probably want to get started with orchestrating the deployment of readily available pre-built applications into coherent streaming or batch data pipelines.
We have guides to help you get started with both link:https://dataflow.spring.io/docs/stream-developer-guides/[Stream] and link:https://dataflow.spring.io/docs/batch-developer-guides/[Batch] processing.

-=== Application and Server Properties
-
-This section covers how you can customize the deployment of your applications. You can use a number of properties to influence settings for the applications that are deployed. Properties can be applied on a per-application basis or in the appropriate server configuration for all deployed applications.
-
-NOTE: Properties set on a per-application basis always take precedence over properties set as the server configuration. This arrangement lets you override global server level properties on a per-application basis.
- -Properties to be applied for all deployed Tasks are defined in the `src/kubernetes/server/server-config-[binder].yaml` file and for Streams in `src/kubernetes/skipper/skipper-config-[binder].yaml`. Replace `[binder]` with the messaging middleware you are using -- for example, `rabbit` or `kafka`. - -==== Memory and CPU Settings - -Applications are deployed with default memory and CPU settings. If you need to, you can adjust these values. The following example shows how to set `Limits` to `1000m` for `CPU` and `1024Mi` for memory and `Requests` to `800m` for CPU and `640Mi` for memory: - -==== -[source] ----- -deployer..kubernetes.limits.cpu=1000m -deployer..kubernetes.limits.memory=1024Mi -deployer..kubernetes.requests.cpu=800m -deployer..kubernetes.requests.memory=640Mi ----- -==== - -Those values results in the following container settings being used: - -==== -[source] ----- -Limits: - cpu: 1 - memory: 1Gi -Requests: - cpu: 800m - memory: 640Mi ----- -==== - -You can also control the default values to which to set the `cpu` and `memory` globally. - -The following example shows how to set the CPU and memory for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - limits: - memory: 640mi - cpu: 500m ----- -==== - -The following example shows how to set the CPU and memory for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - limits: - memory: 640mi - cpu: 500m ----- -==== - -The settings we have used so far affect only the settings for the container. They do not affect the memory setting for the JVM process in the container. If you would like to set JVM memory settings, you can set an environment variable to do so. See the next section for details. - -==== Environment Variables - -To influence the environment settings for a given application, you can use the `spring.cloud.deployer.kubernetes.environmentVariables` deployer property. -For example, a common requirement in production settings is to influence the JVM memory arguments. -You can do so by using the `JAVA_TOOL_OPTIONS` environment variable, as the following example shows: - -==== -[source] ----- -deployer..kubernetes.environmentVariables=JAVA_TOOL_OPTIONS=-Xmx1024m ----- -==== - -NOTE: The `environmentVariables` property accepts a comma-delimited string. If an environment variable contains a value -that is also a comma-delimited string, it must be enclosed in single quotation marks -- for example, -`spring.cloud.deployer.kubernetes.environmentVariables=spring.cloud.stream.kafka.binder.brokers='somehost:9092, -anotherhost:9093'` - -This overrides the JVM memory setting for the desired `` (replace `` with the name of your application). - -[[getting-started-kubernetes-probes]] -==== Liveness and Readiness Probes - -The `liveness` and `readiness` probes use paths called `/health` and `/info`, respectively. They use a `delay` of `10` for both and a `period` of `60` and `10` respectively. You can change these defaults when you deploy the stream by using deployer properties. The liveness and readiness probes are applied only to streams. 
- -The following example changes the `liveness` probe (replace `` with the name of your application) by setting deployer properties: - -==== -[source] ----- -deployer..kubernetes.livenessProbePath=/health -deployer..kubernetes.livenessProbeDelay=120 -deployer..kubernetes.livenessProbePeriod=20 ----- -==== - -You can declare the same as part of the server global configuration for streams, as the following example shows: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - livenessProbePath: /health - livenessProbeDelay: 120 - livenessProbePeriod: 20 ----- -==== - -Similarly, you can swap `liveness` for `readiness` to override the default `readiness` settings. - -By default, port 8080 is used as the probe port. You can change the defaults for both `liveness` and `readiness` probe ports by using deployer properties, as the following example shows: - -==== -[source] ----- -deployer..kubernetes.readinessProbePort=7000 -deployer..kubernetes.livenessProbePort=7000 ----- -==== - -You can declare the same as part of the global configuration for streams, as the following example shows: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - readinessProbePort: 7000 - livenessProbePort: 7000 ----- -==== - -[NOTE] -===== -By default, the `liveness` and `readiness` probe paths use Spring Boot 2.x+ actuator endpoints. To use Spring Boot 1.x actuator endpoint paths, you must adjust the `liveness` and `readiness` values, as the following example shows (replace `` with the name of your application): - -==== -[source] ----- -deployer..kubernetes.livenessProbePath=/health -deployer..kubernetes.readinessProbePath=/info ----- -==== - -To automatically set both `liveness` and `readiness` endpoints on a per-application basis to the default Spring Boot 1.x paths, you can set the following property: - -==== -[source] ----- -deployer..kubernetes.bootMajorVersion=1 ----- -==== - -===== - -You can access secured probe endpoints by using credentials stored in a https://kubernetes.io/docs/concepts/configuration/secret/[Kubernetes secret]. You can use an existing secret, provided the credentials are contained under the `credentials` key name of the secret's `data` block. You can configure probe authentication on a per-application basis. When enabled, it is applied to both the `liveness` and `readiness` probe endpoints by using the same credentials and authentication type. Currently, only `Basic` authentication is supported. - -To create a new secret: - -. Generate the base64 string with the credentials used to access the secured probe endpoints. -+ -Basic authentication encodes a username and a password as a base64 string in the format of `username:password`. -+ -The following example (which includes output and in which you should replace `user` and `pass` with your values) shows how to generate a base64 string: -+ -==== -[source,shell] ----- -$ echo -n "user:pass" | base64 -dXNlcjpwYXNz ----- -==== - -. With the encoded credentials, create a file (for example, `myprobesecret.yml`) with the following contents: -+ -==== -[source] ----- -apiVersion: v1 -kind: Secret -metadata: - name: myprobesecret -type: Opaque -data: - credentials: GENERATED_BASE64_STRING ----- -==== - -. Replace `GENERATED_BASE64_STRING` with the base64-encoded value generated earlier. - -. 
Create the secret by using `kubectl`, as the following example shows: -+ -==== -[source,shell] ----- -$ kubectl create -f ./myprobesecret.yml -secret "myprobesecret" created ----- -==== - -. Set the following deployer properties to use authentication when accessing probe endpoints, as the following example shows: -+ -==== -[source] ----- -deployer..kubernetes.probeCredentialsSecret=myprobesecret ----- -==== -+ -Replace `` with the name of the application to which to apply authentication. - -==== Using `SPRING_APPLICATION_JSON` - -You can use a `SPRING_APPLICATION_JSON` environment variable to set Data Flow server properties (including the configuration of Maven repository settings) that are common across all of the Data Flow server implementations. These settings go at the server level in the container `env` section of a deployment YAML. The following example shows how to do so: - -==== -[source,options=nowrap] ----- -env: -- name: SPRING_APPLICATION_JSON - value: "{ \"maven\": { \"local-repository\": null, \"remote-repositories\": { \"repo1\": { \"url\": \"https://repo.spring.io/libs-snapshot\"} } } }" ----- -==== - -==== Private Docker Registry - -You can pull Docker images from a private registry on a per-application basis. First, you must create a secret in the cluster. Follow the https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/[Pull an Image from a Private Registry] guide to create the secret. - -Once you have created the secret, you can use the `imagePullSecret` property to set the secret to use, as the following example shows: - -==== -[source] ----- -deployer..kubernetes.imagePullSecret=mysecret ----- -==== - -Replace `` with the name of your application and `mysecret` with the name of the secret you created earlier. - -You can also configure the image pull secret at the global server level. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - imagePullSecret: mysecret ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - imagePullSecret: mysecret ----- -==== - -Replace `mysecret` with the name of the secret you created earlier. - -==== Annotations - -You can add annotations to Kubernetes objects on a per-application basis. The supported object types are pod `Deployment`, `Service`, and `Job`. Annotations are defined in a `key:value` format, allowing for multiple annotations separated by a comma. For more information and use cases on annotations, see https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/[Annotations]. - -The following example shows how you can configure applications to use annotations: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.podAnnotations=annotationName:annotationValue -deployer..kubernetes.serviceAnnotations=annotationName:annotationValue,annotationName2:annotationValue2 -deployer..kubernetes.jobAnnotations=annotationName:annotationValue ----- -==== - -Replace `` with the name of your application and the value of your annotations. - -==== Entry Point Style - -An entry point style affects how application properties are passed to the container to be deployed. 
Currently, three styles are supported: - -* `exec` (default): Passes all application properties and command line arguments in the deployment request as container arguments. Application properties are transformed into the format of `--key=value`. -* `shell`: Passes all application properties and command line arguments as environment variables. Each of the applicationor command-line argument properties is transformed into an uppercase string and `.` characters are replaced with `_`. -* `boot`: Creates an environment variable called `SPRING_APPLICATION_JSON` that contains a JSON representation of all application properties. Command line arguments from the deployment request are set as container args. - -NOTE: In all cases, environment variables defined at the server-level configuration and on a per-application basis are sent on to the container as is. - -You can configure an application as follows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.entryPointStyle= ----- -==== - -Replace `` with the name of your application and `` with your desired entry point style. - -You can also configure the entry point style at the global server level. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - entryPointStyle: entryPointStyle ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - entryPointStyle: entryPointStyle ----- -==== - -Replace `entryPointStyle` with the desired entry point style. - -You should choose an Entry Point Style of either `exec` or `shell`, to correspond to how the `ENTRYPOINT` syntax is defined in the container's `Dockerfile`. For more information and uses cases on `exec` versus `shell`, see the https://docs.docker.com/engine/reference/builder/#entrypoint[ENTRYPOINT] section of the Docker documentation. - -Using the `boot` entry point style corresponds to using the `exec` style `ENTRYPOINT`. Command line arguments from the deployment request are passed to the container, with the addition of application properties being mapped into the `SPRING_APPLICATION_JSON` environment variable rather than command line arguments. - -NOTE: When you use the `boot` Entry Point Style, the `deployer..kubernetes.environmentVariables` property must not contain `SPRING_APPLICATION_JSON`. - -==== Deployment Service Account - -You can configure a custom service account for application deployments through properties. You can use an existing service account or create a new one. One way to create a service account is by using `kubectl`, as the following example shows: - -==== -[source,shell] ----- -$ kubectl create serviceaccount myserviceaccountname -serviceaccount "myserviceaccountname" created ----- -==== - -Then you can configure individual applications as follows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.deploymentServiceAccountName=myserviceaccountname ----- -==== - -Replace `` with the name of your application and `myserviceaccountname` with your service account name. - -You can also configure the service account name at the global server level. 
- -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - deploymentServiceAccountName: myserviceaccountname ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - deploymentServiceAccountName: myserviceaccountname ----- -==== - -Replace `myserviceaccountname` with the service account name to be applied to all deployments. - -==== Image Pull Policy - -An image pull policy defines when a Docker image should be pulled to the local registry. Currently, three policies are supported: - -* `IfNotPresent` (default): Do not pull an image if it already exists. -* `Always`: Always pull the image regardless of whether it already exists. -* `Never`: Never pull an image. Use only an image that already exists. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.imagePullPolicy=Always ----- -==== - -Replace `` with the name of your application and `Always` with your desired image pull policy. - -You can configure an image pull policy at the global server level. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - imagePullPolicy: Always ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - imagePullPolicy: Always ----- -==== - -Replace `Always` with your desired image pull policy. - -==== Deployment Labels - -You can set custom labels on objects related to https://kubernetes.io/docs/concepts/workloads/controllers/deployment/[Deployment]. See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/[Labels] for more information on labels. Labels are specified in `key:value` format. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.deploymentLabels=myLabelName:myLabelValue ----- -==== - -Replace `` with the name of your application, `myLabelName` with your label name, and `myLabelValue` with the value of your label. - -Additionally, you can apply multiple labels, as the following example shows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.deploymentLabels=myLabelName:myLabelValue,myLabelName2:myLabelValue2 ----- -==== - -==== Tolerations - -Tolerations work with taints to ensure pods are not scheduled onto particular nodes. -Tolerations are set into the pod configuration while taints are set onto nodes. -See the https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/[Taints and Tolerations] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.tolerations=[{key: 'mykey', operator: 'Equal', value: 'myvalue', effect: 'NoSchedule'}] ----- -==== - -Replace `` with the name of your application and the key-value pairs according to your desired toleration configuration. 
- -You can configure tolerations at the global server level as well. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - tolerations: - - key: mykey - operator: Equal - value: myvalue - effect: NoSchedule ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - tolerations: - - key: mykey - operator: Equal - value: myvalue - effect: NoSchedule ----- -==== - -Replace the `tolerations` key-value pairs according to your desired toleration configuration. - -==== Secret References - -Secrets can be referenced and their entire data contents can be decoded and inserted into the pod environment as individual variables. -See the https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/#configure-all-key-value-pairs-in-a-secret-as-container-environment-variables[Configure all key-value pairs in a Secret as container environment variables] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.secretRefs=testsecret ----- -==== - -You can also specify multiple secrets, as follows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.secretRefs=[testsecret,anothersecret] ----- -==== - -Replace `` with the name of your application and the `secretRefs` attribute with the appropriate values for your application environment and secret. - -You can configure secret references at the global server level as well. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - secretRefs: - - testsecret - - anothersecret ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - secretRefs: - - testsecret - - anothersecret ----- -==== - -Replace the items of `secretRefs` with one or more secret names. - -==== Secret Key References - -Secrets can be referenced and their decoded value can be inserted into the pod environment. -See the https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables[Using Secrets as Environment Variables] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.secretKeyRefs=[{envVarName: 'MY_SECRET', secretName: 'testsecret', dataKey: 'password'}] ----- -==== - -Replace `` with the name of your application and the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your application environment and secret. - -You can configure secret key references at the global server level as well. 
- -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - secretKeyRefs: - - envVarName: MY_SECRET - secretName: testsecret - dataKey: password ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - secretKeyRefs: - - envVarName: MY_SECRET - secretName: testsecret - dataKey: password ----- -==== - -Replace the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your secret. - -==== ConfigMap References - -A ConfigMap can be referenced and its entire data contents can be decoded and inserted into the pod environment as individual variables. -See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#configure-all-key-value-pairs-in-a-configmap-as-container-environment-variables[Configure all key-value pairs in a ConfigMap as container environment variables] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.configMapRefs=testcm ----- -==== - -You can also specify multiple ConfigMap instances, as follows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.configMapRefs=[testcm,anothercm] ----- -==== - -Replace `` with the name of your application and the `configMapRefs` attribute with the appropriate values for your application environment and ConfigMap. - -You can configure ConfigMap references at the global server level as well. - -The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - configMapRefs: - - testcm - - anothercm ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - configMapRefs: - - testcm - - anothercm ----- -==== - -Replace the items of `configMapRefs` with one or more secret names. - -==== ConfigMap Key References - -A ConfigMap can be referenced and its associated key value inserted into the pod environment. -See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#define-container-environment-variables-using-configmap-data[Define container environment variables using ConfigMap data] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.configMapKeyRefs=[{envVarName: 'MY_CM', configMapName: 'testcm', dataKey: 'platform'}] ----- -==== - -Replace `` with the name of your application and the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your application environment and ConfigMap. - -You can configure ConfigMap references at the global server level as well. - -The following example shows how to do so for streams. 
Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - configMapKeyRefs: - - envVarName: MY_CM - configMapName: testcm - dataKey: platform ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - configMapKeyRefs: - - envVarName: MY_CM - configMapName: testcm - dataKey: platform ----- -==== - -Replace the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your ConfigMap. - -==== Pod Security Context - -You can confiure the pod security context to run processes under the specified UID (user ID) or GID (group ID). -This is useful when you want to not run processes under the default `root` UID and GID. -You can define either the `runAsUser` (UID) or `fsGroup` (GID), and you can configure them to work together. -See the https://kubernetes.io/docs/tasks/configure-pod-container/security-context/[Security Context] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure application pods: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.podSecurityContext={runAsUser: 65534, fsGroup: 65534} ----- -==== - -Replace `` with the name of your application and the `runAsUser` and/or `fsGroup` attributes with the appropriate values for your container environment. - -You can configure the pod security context at the global server level as well. - -The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - podSecurityContext: - runAsUser: 65534 - fsGroup: 65534 ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - podSecurityContext: - runAsUser: 65534 - fsGroup: 65534 ----- -==== - -Replace the `runAsUser` and/or `fsGroup` attributes with the appropriate values for your container environment. - -==== Service Ports - -When you deploy applications, a kubernetes Service object is created with a default port of `8080`. If the `server.port` property is set, it overrides the default port value. You can add additional ports to the Service object on a per-application basis. You can add multiple ports with a comma delimiter. - -The following example shows how you can configure additional ports on a Service object for an application: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.servicePorts=5000 -deployer..kubernetes.servicePorts=5000,9000 ----- -==== - -Replace `` with the name of your application and the value of your ports. - -==== StatefulSet Init Container - -When deploying an application by using a StatefulSet, an Init Container is used to set the instance index in the pod. -By default, the image used is `busybox`, which you can be customize. 
- -The following example shows how you can individually configure application pods: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.statefulSetInitContainerImageName=myimage:mylabel ----- -==== - -Replace `` with the name of your application and the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment. - -You can configure the StatefulSet Init Container at the global server level as well. - -The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - statefulSetInitContainerImageName: myimage:mylabel ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - statefulSetInitContainerImageName: myimage:mylabel ----- -==== - -Replace the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment. - -==== Init Containers - -When you deploy applications, you can set a custom Init Container on a per-application basis. -Refer to the https://kubernetes.io/docs/concepts/workloads/pods/init-containers/[Init Containers] section of the Kubernetes reference for more information. - -The following example shows how you can configure an Init Container for an application: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.initContainer={containerName: 'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']} ----- -==== - -Replace `` with the name of your application and set the values of the `initContainer` attributes appropriate for your Init Container. - -==== Lifecycle Support - -When you deploy applications, you may attach `postStart` and `preStop` https://kubernetes.io/docs/tasks/configure-pod-container/attach-handler-lifecycle-event/[Lifecycle handlers] to execute commands. -The Kubernetes API supports other types of handlers besides `exec`. This feature may be extended to support additional actions in a future release. -To configure the Lifecycle handlers as shown in the linked page above,specify each command as a comma-delimited list, using the following property keys: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.lifecycle.postStart.exec.command=/bin/sh,-c,'echo Hello from the postStart handler > /usr/share/message' -deployer..kubernetes.lifecycle.preStop.exec.command=/bin/sh,-c,'nginx -s quit; while killall -0 nginx; do sleep 1; done' ----- -==== - -==== Additional Containers - -When you deploy applications, you may need one or more containers to be deployed along with the main container. -This would allow you to adapt some deployment patterns such as sidecar, adapter in case of multi container pod setup. 
-
-The following example shows how you can configure additional containers for an application:
-
-====
-[source,options=nowrap]
-----
-deployer..kubernetes.additionalContainers=[{name: 'c1', image: 'busybox:latest', command: ['sh', '-c', 'echo hello1'], volumeMounts: [{name: 'test-volume', mountPath: '/tmp', readOnly: true}]},{name: 'c2', image: 'busybox:1.26.1', command: ['sh', '-c', 'echo hello2']}]
-----
-====
+We have prepared scripts that simplify creating a local Minikube or Kind cluster, or using a remote cluster such as GKE or TKG. See xref:local-k8s-development[Configure Kubernetes for Local Development] for more information.
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc
index a7ea932c3c..698cadf4d3 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc
@@ -9,6 +9,6 @@ You can find a gentle introduction to Spring Cloud Data Flow along with installa
We then build an introductory Spring Cloud Data Flow application, discussing some core principles as we go.
--

-See the link:https://dataflow.spring.io/docs/installation/local/[Local Machine] section of the microsite for more information on setting up docker compose and manual installation.
+This section covers how to get started with Spring Cloud Data Flow running locally with Docker Compose. See the link:https://dataflow.spring.io/docs/installation/local/[Local Machine] section of the microsite for more information on installing Spring Cloud Data Flow with Docker Compose.

Once you have the Data Flow server installed locally, you probably want to get started with orchestrating the deployment of readily available pre-built applications into coherent streaming or batch data pipelines. We have guides to help you get started with both link:https://dataflow.spring.io/docs/stream-developer-guides/[Stream] and link:https://dataflow.spring.io/docs/batch-developer-guides/[Batch] processing.
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc
index bc26a483ab..eb1196ebf5 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc
@@ -1,6 +1,8 @@
[[getting-started]]
= Getting Started

+NOTE: This version of Spring Cloud Data Flow provides: <>
+
include::getting-started-local.adoc[]
include::getting-started-cloudfoundry.adoc[]
include::getting-started-kubernetes.adoc[]
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc
index dca2881674..86c465f988 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc
@@ -1,5 +1,5 @@
= Spring Cloud Data Flow Reference Guide
-Sabby Anandan; Marius Bogoevici; Eric Bottard; Mark Fisher; Ilayaperumal Gopinathan; Gunnar Hillert; Mark Pollack; Patrick Peralta; Glenn Renfro; Thomas Risberg; Dave Syer; David Turanski; Janne Valkealahti; Oleg Zhurakousky; Jay Bryant; Vinicius Carvalho; Chris Schaefer; Damien Vitrac; Christian Tzolov
+Sabby Anandan; Marius Bogoevici; Eric Bottard; Mark Fisher; Ilayaperumal Gopinathan; Gunnar Hillert; Mark Pollack; Patrick Peralta; Glenn Renfro; Thomas Risberg; Dave Syer; David Turanski; Janne Valkealahti; Oleg Zhurakousky; Jay Bryant; Vinicius Carvalho; Chris Schaefer; Damien Vitrac; Christian Tzolov; Claudia Bressi; Chris Bono; Corneil du Plessis
:doctype: book
:toc: left
:toclevels: 4
@@ -8,36 +8,29 @@ Sabby Anandan; Marius Bogoevici; Eric Bottard; Mark Fisher; Ilayaperumal Gopinat
:hide-uri-scheme:
:docinfo: shared

-:spring-cloud-dataflow-docs: https://docs.spring.io/spring-cloud-dataflow/docs/{project-version}/reference
-:spring-cloud-dataflow-docs-current: https://docs.spring.io/spring-cloud-dataflow/docs/current-SNAPSHOT/reference/html/
-:spring-cloud-stream-docs: https://docs.spring.io/spring-cloud-stream/docs/current/reference/htmlsingle/index.html
-:spring-boot-version: 2.1.1.RELEASE
-:spring-cloud-task-version: 2.0.0.RELEASE
-:spring-batch-version: 4.1.0.RELEASE
-:spring-batch-doc-version: 4.1.x
-:composed-task-version: 2.1.0.RELEASE
-:spring-boot-docs-reference: https://docs.spring.io/spring-boot/docs/2.1.1.RELEASE/reference
-:scs-app-starters-docs: https://docs.spring.io/spring-cloud-stream-app-starters/docs/current/reference/html
-:scs-app-starters-docs-htmlsingle: https://docs.spring.io/spring-cloud-stream-app-starters/docs/current/reference/htmlsingle
:github-repo: spring-cloud/spring-cloud-dataflow
:github-code: https://github.com/{github-repo}
:microsite-version: master
-
-:dataflow-asciidoc: https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow/master/spring-cloud-dataflow-docs/src/main/asciidoc
-
-:docker-http-source-rabbit-version: 2.1.0.RELEASE
-:docker-time-source-rabbit-version: 2.1.0.RELEASE
-:docker-log-sink-rabbit-version: 2.1.0.RELEASE
-:docker-log-sink-kafka-version: 2.1.0.RELEASE
-:docker-http-source-kafka-version: 2.1.0.RELEASE
-:docker-time-source-kafka-version: 2.1.0.RELEASE
-:docker-timestamp-task-version: 2.0.0.RELEASE
+:spring-boot-version: 2.7.14
+:spring-cloud-task-version: 2.4.6
+:spring-batch-version: 4.3.8
+:spring-boot-docs: https://docs.spring.io/spring-boot/docs/{spring-boot-version}/reference/htmlsingle
+:scs-stream-apps-docs: https://docs.spring.io/stream-applications/docs/current/reference/html
+:dataflow-asciidoc-images: https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow/main/spring-cloud-dataflow-docs/src/main/asciidoc/images
https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow/main/spring-cloud-dataflow-docs/src/main/asciidoc/images + +:docker-http-source-rabbit-version: 3.2.1 +:docker-time-source-rabbit-version: 3.2.1 +:docker-log-sink-rabbit-version: 3.2.1 +:docker-log-sink-kafka-version: 3.2.1 +:docker-http-source-kafka-version: 3.2.1 +:docker-time-source-kafka-version: 3.2.1 +:docker-timestamp-task-version: 2.0.2 ifdef::backend-html5[] Version {project-version} -(C) 2012-2020 Pivotal Software, Inc. +(C) 2012-2021 VMware, Inc. All rights reserved. _Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically._ @@ -47,6 +40,8 @@ endif::backend-html5[] include::preface.adoc[] +include::overview.adoc[] + include::getting-started.adoc[] include::applications.adoc[] @@ -71,8 +66,6 @@ include::tasks-monitoring.adoc[] include::dashboard.adoc[] -include::samples.adoc[] - include::api-guide.adoc[] include::appendix.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/overview.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/overview.adoc new file mode 100644 index 0000000000..765723c43a --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/overview.adoc @@ -0,0 +1,8 @@ +[[overview]] += Overview + +Microservice-based streaming and batch data processing for Cloud Foundry and Kubernetes. + +Spring Cloud Data Flow provides tools to create complex topologies for streaming and batch data pipelines. The data pipelines consist of Spring Boot apps, built using the Spring Cloud Stream or Spring Cloud Task microservice frameworks. + +Spring Cloud Data Flow supports a range of data processing use cases, from ETL to import/export, event streaming, and predictive analytics. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc index fd280d8c66..988cabaabb 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc @@ -3,10 +3,6 @@ [[dataflow-documentation-about]] == About the documentation -The documentation for this release is available in {spring-cloud-dataflow-docs}/htmlsingle[HTML]. - -The latest copy of the Spring Cloud Data Flow reference guide can be found {spring-cloud-dataflow-docs-current}[here]. - Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in @@ -18,8 +14,8 @@ Having trouble with Spring Cloud Data Flow? We would like to help! * Ask a question. We monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-cloud-dataflow[`spring-cloud-dataflow`]. -* Report bugs with Spring Cloud Data Flow at https://github.com/spring-cloud/spring-cloud-dataflow/issues. -* Chat with the community and developers on https://gitter.im/spring-cloud/spring-cloud-dataflow[Gitter]. +* Report bugs with Spring Cloud Data Flow at https://github.com/{github-repo}/issues. +* Review the latest release notes at https://github.com/{github-repo}/releases. NOTE: All of Spring Cloud Data Flow is open source, including the documentation!
If you find problems with the docs or if you just want to improve them, please {github-code}[get involved]. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/samples.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/samples.adoc deleted file mode 100644 index e254efb70a..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/samples.adoc +++ /dev/null @@ -1,36 +0,0 @@ -[[dataflow-samples]] -= Samples - -[partintro] --- -This section shows the available samples. --- - -[[samples-links]] -== Links -Several samples have been created to help you get started on implementing higher-level use cases than the basic Streams and Tasks shown in the reference guide. -The samples are part of a separate https://github.com/spring-cloud/spring-cloud-dataflow-samples[repository] and have their own https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/[reference documentation]. - -The following samples are available: - -.General -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_java_dsl[Java DSL] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#spring-cloud-data-flow-samples-http-cassandra-overview[HTTP to Cassandra] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_http_to_mysql_demo[HTTP to MySQL] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_http_to_gemfire_demo[HTTP to Gemfire] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_gemfire_cq_to_log_demo[Gemfire CQ to Log Demo] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_gemfire_to_log_demo[Gemfire to Log Demo] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_custom_spring_cloud_stream_processor[Custom Processor] - -.Task and Batch -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_batch_job_on_cloud_foundry[Batch Job on Cloud Foundry] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_batch_file_ingest[Batch File Ingest] - -.Data Science -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_species_prediction[Species Prediction] - -.Functions -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_functions_in_spring_cloud_data_flow[Using Spring Cloud Function] - -{sp}+ -{sp}+ diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc index b0697b2eb9..7bfa9faec2 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc @@ -58,16 +58,14 @@ The following listing shows the output of the `help` command: ==== [source,bash] ---- -! - Allows execution of operating system (OS) commands -clear - Clears the console -cls - Clears the console -date - Displays the local date and time -exit - Exits the shell -http get - Make GET request to http endpoint -http post - POST data to http endpoint -quit - Exits the shell -system properties - Shows the shells properties {JB - restore the apostrophe} -version - Displays shell version +Built-In Commands + help: Display help about available commands + stacktrace: Display the full stacktrace of the last error. + clear: Clear the shell screen. + quit, exit: Exit the shell. 
+ history: Display or save the history of previously run commands + version: Show version info + script: Read and execute commands from a file. ---- ==== @@ -77,26 +75,28 @@ Adding the name of the command to `help` shows additional information on how to [source,bash] ---- dataflow:>help stream create -Keyword: stream create -Description: Create a new stream definition - Keyword: ** default ** - Keyword: name - Help: the name to give to the stream - Mandatory: true - Default if specified: '__NULL__' - Default if unspecified: '__NULL__' +NAME + stream create - Create a new stream definition - Keyword: definition - Help: a stream definition, using the DSL (e.g. "http --port=9000 | hdfs") - Mandatory: true - Default if specified: '__NULL__' - Default if unspecified: '__NULL__' +SYNOPSIS + stream create [--name String] [--definition String] --description String --deploy boolean - Keyword: deploy - Help: whether to deploy the stream immediately - Mandatory: false - Default if specified: 'true' - Default if unspecified: 'false' +OPTIONS + --name String + the name to give to the stream + [Mandatory] + + --definition String + a stream definition, using the DSL (e.g. "http --port=9000 | hdfs") + [Mandatory] + + --description String + a short description about the stream + [Optional] + + --deploy boolean + whether to deploy the stream immediately + [Optional, default = false] ---- ==== @@ -109,7 +109,7 @@ You can complete the shell command options in the shell by pressing the `TAB` ke [source,bash] ---- dataflow:>stream create -- -stream create --definition stream create --name +--definition --deploy --description --name ---- ==== @@ -272,7 +272,7 @@ Many applications accept options that are to be interpreted as SpEL expressions, * Literals can be enclosed in either single or double quotes. * Quotes need to be doubled to embed a literal quote. Single quotes inside double quotes need no special treatment, and the reverse is also true. -As a last example, assume you want to use the link:${scs-app-starters-docs}/spring-cloud-stream-modules-processors.html#spring-clound-stream-modules-transform-processor[transform processor]. +As a last example, assume you want to use the link:{scs-stream-apps-docs}/spring-cloud-stream-modules-transform-processor[transform processor]. This processor accepts an `expression` option which is a SpEL expression. It is to be evaluated against the incoming message, with a default of `payload` (which forwards the message payload untouched). It is important to understand that the following statements are equivalent: diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/spring-boot-3x.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/spring-boot-3x.adoc new file mode 100644 index 0000000000..62a797cee2 --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/spring-boot-3x.adoc @@ -0,0 +1,101 @@ +[appendix] +[[appendix-boot3]] +== Spring Boot 3.x Support + +=== Stream Applications +Spring Cloud Data Flow supports both Spring Boot `2.x` and `3.x` based Stream applications. + +==== Differences in 3.x +Be aware of the following areas that have changed across versions. + +===== Metrics Configuration Properties +IMPORTANT: The following does **NOT** apply when configuring metrics for the Dataflow or Skipper server as they both run on Spring Boot 2.x. It is only applicable to applications managed by Dataflow.
+ +The naming of the metrics registry-specific properties differs as follows: + +* `2.x`: `management.metrics.export.<registry>.<property>` +* `3.x`: `management.<registry>.metrics.export.<property>` + +.Example: +* `2.x`: `management.metrics.export.prometheus.enabled=true` +* `3.x`: `management.prometheus.metrics.export.enabled=true` + +NOTE: One exception to this rule is the Prometheus RSocket Proxy which still runs on Spring Boot `2.x` and therefore expects the properties in the `management.metrics.export.prometheus.rsocket.*` format. + +Be sure that you use the `2.x` format when configuring `2.x` based stream apps and the `3.x` format when configuring `3.x` based stream apps. + +===== Dataflow Metrics Property Replication +By default, Dataflow replicates relevant metrics properties that it has been configured with to all launched stream and task applications. +This replication has been updated to target both the `2.x` and `3.x` expected formats. +In other words, if your `2.x` stream apps are currently inheriting the Dataflow metrics configuration, they will continue to do so for your `3.x` stream apps. + + +==== Pre-packaged Stream Applications +The default <> are based on Spring Boot `2.x`. +To use the latest pre-packaged apps based on Spring Boot `3.x`, you must manually register the apps (relevant coordinates below). + +[cols="a"] +[cols="40%"] +|=== +|[.small]#Stream Applications# + +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-rabbit-maven[RabbitMQ + Maven]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-rabbit-docker[RabbitMQ + Docker]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-kafka-maven[Kafka + Maven]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-kafka-docker[Kafka + Docker]# +|=== + +[cols="a"] +[cols="40%"] +|=== +|[.small]#HTTP Repository Location for Apps# + +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.rabbit-apps-maven-repo-url.properties[RabbitMQ]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.kafka-apps-maven-repo-url.properties[Kafka]# +|=== + +=== Spring Cloud Task / Batch Applications + +The database schemas for Spring Cloud Task 3.x and Spring Batch 5.x have been modified in the versions that form part of Spring Boot 3.x. + +Spring Cloud Data Flow will create a set of tables for the Boot 3.x version that is prefixed by `BOOT3_` and will configure the `spring.cloud.task.tablePrefix` and `spring.batch.jdbc.table-prefix` with the correct values. + +For Data Flow to know that a specific task is a Boot 3.x application, the version has to be provided as part of registration. The REST endpoints accept a `bootVersion=3` parameter, and the shell commands accept `--bootVersion 3`. + +Since there are now multiple sets of tables that represent task and batch executions, each schema has been assigned a schemaTarget name.
This value forms part of the queries used when retrieving execution data. The UI takes care of this by using the embedded resource links. If you are using the REST API directly, you will need to update those requests. + +==== Pre-packaged Task / Batch Applications +The default <> are based on Spring Boot `2.x`, Spring Cloud Task `2.x`, and Spring Batch `4.x`. +To use the latest pre-packaged apps based on Spring Boot `3.x`, Spring Cloud Task `3.x`, and Spring Batch `5.x`, you must manually register the apps using the properties below. + +.Maven coordinates +[.small] +[source,properties] +---- +task.timestamp=maven://io.spring:timestamp-task:3.0.0 +task.timestamp.bootVersion=3 + +task.timestamp-batch=maven://io.spring:timestamp-batch-task:3.0.0 +task.timestamp-batch.bootVersion=3 +---- +[.small] + +.Docker coordinates +[.small] +[source,properties] +---- +task.timestamp=docker:springcloudtask/timestamp-task:3.0.0 +task.timestamp.bootVersion=3 + +task.timestamp-batch=docker:springcloudtask/timestamp-batch-task:3.0.0 +task.timestamp-batch.bootVersion=3 +---- +[.small] + +TIP: The properties can be used when registering an app in the Dataflow UI or the Dataflow shell CLI. + +==== Composed Task Runner + +Composed Task Runner for Spring Cloud Data Flow 2.11.x supports launching Spring Boot `3.x`/`2.x`, Spring Cloud Task `3.x`/`2.x`, and Spring Batch `5.x`/`4.x` applications. + +NOTE: When registering Task applications, verify that the correct `Spring Boot Version` is selected. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/spring-cloud-stream-overview.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/spring-cloud-stream-overview.adoc deleted file mode 100644 index bc289df590..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/spring-cloud-stream-overview.adoc +++ /dev/null @@ -1,80 +0,0 @@ - -[[spring-cloud-stream-overview]] -= Spring Cloud Stream Overview - -[partintro] --- -This section goes into more detail about how you can work with Spring Cloud Stream. It covers topics -such as creating and running stream applications. - -If you're just starting out with Spring Cloud Data Flow, you should probably read the Getting Started guide for "`<>`" , "`<>`", "`<>`" before diving into this section. --- - -== Introducing Spring Cloud Stream - -The Spring Cloud Stream project allows a user to develop and run messaging microservices using Spring Integration and run them locally or in the cloud, either as standalone apps or via Spring Cloud Data Flow. Just add `@EnableBinding` and run your app as a Spring Boot app (single application context). You just need to connect to the physical broker for the bindings, which is automatic if the relevant binder implementation is available on the classpath (e.g. Kafka or RabbitMQ).
- -Here's a sample source application (output channel only): - -[source,java] ----- -@SpringBootApplication -@ComponentScan(basePackageClasses=TimerSource.class) -public class TimerSourceApplication { - - public static void main(String[] args) { - SpringApplication.run(TimerSourceApplication.class, args); - } - -} - -@Configuration -@EnableBinding(Source.class) -public class TimerSource { - - @Value("${format}") - private String format; - - @Bean - @InboundChannelAdapter(value = Source.OUTPUT, poller = @Poller(fixedDelay = "${fixedDelay}", maxMessagesPerPoll = "1")) - public MessageSource timerMessageSource() { - return () -> new GenericMessage<>(new SimpleDateFormat(format).format(new Date())); - } - -} ----- - -`@EnableBinding` is parameterized by an interface (in this case `Source`) which declares input and output channels. `Source`, `Sink` and `Processor` are provided off the shelf, but you can define others. Here's the definition of `Source`: - -[source,java] ----- -public interface Source { - @Output("output") - MessageChannel output(); -} ----- - -The `@Output` annotation is used to identify output channels (messages leaving the app) and `@Input` is used to identify input channels (messages entering the app). It is optionally parameterized by a channel name - if the name is not provided the method name is used instead. An implementation of the interface is created for you and can be used in the application context by autowiring it, e.g. into a test case: - -[source,java] ----- -@RunWith(SpringRunner.class) -@SpringBootTest(classes = TimerSourceApplication.class) -@DirtiesContext -public class TimerSourceApplicationTests { - - @Autowired - private Source source - - @Test - public void contextLoads() { - assertNotNull(this.source.output()); - } - -} ----- - -NOTE: In this case there is only one `Source` in the application context so there is no need to qualify it when it is autowired. If there is ambiguity, e.g. if you are composing one app from some others, you can use the `@Bindings` qualifier to inject a specific channel set. The `@Bindings` qualifier takes a parameter which is the class that carries the `@EnableBinding` annotation (in this case the `TimerSource`). - -For more information, including how to run `spring-cloud-stream` applications autonomously (without Spring Cloud Data Flow), -visit the link:https://cloud.spring.io/spring-cloud-stream[Spring Cloud Stream project home page]. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc index 825d27cc6a..076b50f85f 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc @@ -47,7 +47,7 @@ Data flows through the pipeline from left to right. In Data Flow, the Unix command is replaced by a https://cloud.spring.io/spring-cloud-stream/[Spring Cloud Stream] application and each pipe symbol represents connecting the input and output of applications over messaging middleware, such as RabbitMQ or Apache Kafka. Each Spring Cloud Stream application is registered under a simple name. -The registration process specifies where the application can be obtained (for example, in a Maven Repository or a Docker registry). You can find out more about how to register Spring Cloud Stream applications in this <>. +The registration process specifies where the application can be obtained (for example, in a Maven Repository or a Docker registry). 
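+For example, the following shell command registers a sink under the name `log`; the Maven URI shown is illustrative, and any resolvable `maven://` or `docker:` URI works the same way:
+
+====
+[source,bash]
+----
+dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:3.2.1
+----
+====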
In Data Flow, we classify the Spring Cloud Stream applications as Sources, Processors, or Sinks. As a simple example, consider the collection of data from an HTTP Source and writing to a File Sink. @@ -81,77 +81,6 @@ NOTE: When naming a stream definition, keep in mind that each application in the You can use the Stream Application DSL to define custom binding properties for each of the Spring Cloud Stream applications. See the link:https://dataflow.spring.io/docs/feature-guides/streams/stream-application-dsl/[Stream Application DSL] section of the microsite for more information. -Consider the following Java interface, which defines an input method and two output methods: - -==== -[source,java] ----- -public interface Barista { - - @Input - SubscribableChannel orders(); - - @Output - MessageChannel hotDrinks(); - - @Output - MessageChannel coldDrinks(); -} ----- -==== - -Further consider the following Java interface, which is typical for creating a Kafka Streams application: - -==== -[source,java] ----- -interface KStreamKTableBinding { - - @Input - KStream inputStream(); - - @Input - KTable inputTable(); -} ----- -==== - -In these cases with multiple input and output bindings, Data Flow cannot make any assumptions about the flow of data from one application to another. -Therefore, you need to set the binding properties to "`wire up`" the application. -The *Stream Application DSL* uses a "`double pipe`", instead of the "`pipe symbol`", to indicate that Data Flow should not configure the binding properties of the application. Think of `||` as meaning "`in parallel`". -The following example shows such a "`parallel`" definition: - -==== -[source,bash] ----- -dataflow:> stream create --definition "orderGeneratorApp || baristaApp || hotDrinkDeliveryApp || coldDrinkDeliveryApp" --name myCafeStream ----- -==== - -NOTE: Breaking Change! Versions of SCDF Local, Cloud Foundry 1.7.0 to 1.7.2 and SCDF Kubernetes 1.7.0 to 1.7.1 used the `comma` character as the separator between applications. This caused breaking changes in the traditional Stream DSL. While not ideal, changing the separator character was felt to be the best solution with the least impact on existing users. - -This stream has four applications. -`baristaApp` has two output destinations, `hotDrinks` and `coldDrinks`, intended to be consumed by the `hotDrinkDeliveryApp` and `coldDrinkDeliveryApp`, respectively. -When deploying this stream, you need to set the binding properties so that the `baristaApp` sends hot drink messages to the `hotDrinkDeliveryApp` destination and cold drink messages to the `coldDrinkDeliveryApp` destination. -The following listing does so: - -==== -[source,bash,subs=attributes] ----- -app.baristaApp.spring.cloud.stream.bindings.hotDrinks.destination=hotDrinksDest -app.baristaApp.spring.cloud.stream.bindings.coldDrinks.destination=coldDrinksDest -app.hotDrinkDeliveryApp.spring.cloud.stream.bindings.input.destination=hotDrinksDest -app.coldDrinkDeliveryApp.spring.cloud.stream.bindings.input.destination=coldDrinksDest ----- -==== - -If you want to use consumer groups, you need to set the Spring Cloud Stream application properties, `spring.cloud.stream.bindings..producer.requiredGroups` and `spring.cloud.stream.bindings..group`, on the producer and consumer applications respectively. - -Another common use case for the Stream Application DSL is to deploy a HTTP gateway application that sends a synchronous request or reply message to a Kafka or RabbitMQ application. 
-In this case, both the HTTP gateway application and the Kafka or RabbitMQ application can be a Spring Integration application that does not make use of the Spring Cloud Stream library. - -It is also possible to deploy only a single application using the Stream application DSL. - === Application Properties Each application takes properties to customize its behavior. As an example, the `http` source module exposes a `port` setting that lets the data ingestion port be changed from the default value: @@ -186,13 +115,13 @@ NOTE: Supported Stream `` possibilities are: `source`, `processor`, and The lifecycle of a stream goes through the following stages: -. <> -. <> -. <> -. <> or <> -. <> or <> applications in the Stream. +. Register stream definition +. Create stream using definition +. Deploy stream +. Destroy or undeploy stream +. Upgrade or roll back apps in the stream -https://cloud.spring.io/spring-cloud-skipper/[Skipper] is a server that lets you discover Spring Boot applications and manage their lifecycle on multiple Cloud Platforms. +https://spring.io/projects/spring-cloud-skipper/[Skipper] is a server that lets you discover Spring Boot applications and manage their lifecycle on multiple cloud platforms. Applications in Skipper are bundled as packages that contain the application's resource location, application properties, and deployment properties. You can think of Skipper packages as being analogous to packages found in tools such as `apt-get` or `brew`. @@ -277,22 +206,20 @@ applications built with the RabbitMQ binder, you could do the following: ==== [source,bash] ---- -dataflow:>app register --name http --type source --uri maven://org.springframework.cloud.stream.app:http-source-rabbit:1.2.1.BUILD-SNAPSHOT -dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.1.BUILD-SNAPSHOT +dataflow:>app register --name http --type source --uri maven://org.springframework.cloud.stream.app:http-source-rabbit:3.2.1 +dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:3.2.1 ---- ==== -If you would like to register multiple applications at one time, you can store them in a properties file, -where the keys are formatted as `.` and the values are the URIs. +If you would like to register multiple applications at one time, you can store them in a properties file, where the keys are formatted as `<type>.<name>` and the values are the URIs. -For example, to register the snapshot versions of the `http` and `log` -applications built with the RabbitMQ binder, you could have the following in a properties file (for example, `stream-apps.properties`): +For example, to register the `http` and `log` applications built with the RabbitMQ binder, you could have the following in a properties file (for example, `stream-apps.properties`): ==== [source,bash] ---- -source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.2.1.BUILD-SNAPSHOT -sink.log=maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.1.BUILD-SNAPSHOT +source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:3.2.1 +sink.log=maven://org.springframework.cloud.stream.app:log-sink-rabbit:3.2.1 ---- ==== @@ -397,18 +324,16 @@ IMPORTANT: Only pre-registered applications can be used to `deploy`, `update`, o An attempt to update the `mysource` to version `0.0.1` (not registered) fails.
[[supported-apps-and-tasks]] -==== Register Supported Applications and Tasks +==== Register Out-of-the-Box Applications and Tasks -For convenience, we have the static files with application-URIs (for both Maven and Docker) available -for all the out-of-the-box stream and task or batch app-starters. You can point to this file and import -all the application-URIs in bulk. Otherwise, as explained previously, you can register them individually or have your own -custom property file with only the required application-URIs in it. We recommend, however, having a "`focused`" -list of desired application-URIs in a custom property file. +For convenience, we have the static files with application-URIs (for both Maven and Docker) available for all the out-of-the-box stream and task applications. +You can point to this file and import all the application-URIs in bulk. +Otherwise, as explained previously, you can register them individually or have your own custom property file with only the required application-URIs in it. +We recommend, however, having a "`focused`" list of desired application-URIs in a custom property file. -===== Spring Cloud Stream App Starters - -The following table includes the `dataflow.spring.io` links to the available Stream Application Starters based on Spring Cloud Stream 2.1.x -and Spring Boot 2.1.x: +[[ootb-stream-apps]] +===== Out-of-the-Box Stream Applications +The following table includes the `dataflow.spring.io` links to the stream applications based on Spring Cloud Stream `3.2.x` and Spring Boot `2.7.x`: [width="100%",frame="topbot",options="header"] |====================== |RabbitMQ + Maven |https://dataflow.spring.io/rabbitmq-maven-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-rabbit-maven +|https://dataflow.spring.io/rabbitmq-maven-latest-snapshot |RabbitMQ + Docker |https://dataflow.spring.io/rabbitmq-docker-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-rabbit-docker +|https://dataflow.spring.io/rabbitmq-docker-latest-snapshot |Apache Kafka + Maven |https://dataflow.spring.io/kafka-maven-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-kafka-maven +|https://dataflow.spring.io/kafka-maven-latest-snapshot |Apache Kafka + Docker |https://dataflow.spring.io/kafka-docker-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-kafka-docker +|https://dataflow.spring.io/kafka-docker-latest-snapshot |====================== +NOTE: By default, the out-of-the-box apps' actuator endpoints are secured. You can disable security when deploying streams by setting the following property: `[small]#app.*.spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration#` -NOTE: By default, App Starter actuator endpoints are secured. You can disable security by deploying streams with the -`app.*.spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration` property. On Kubernetes, see the <> section for how to configure security for actuator endpoints. -NOTE: Starting with the Spring Cloud Stream 2.1 GA release, we now have robust interoperability with the Spring Cloud Function -programming model. Building on that, with the Einstein release-train, it is now possible to pick a few Stream App -Starters and compose them into a single application by using the functional-style programming model.
Check out the -https://spring.io/blog/2019/01/09/composed-function-support-in-spring-cloud-data-flow["Composed Function Support in -Spring Cloud Data Flow"] blog to learn more about the developer and orchestration-experience with an example. - -===== Spring Cloud Task App Starters - -The following table includes the available Task Application Starters based on Spring Cloud Task 2.1.x and Spring Boot 2.1.x: +[[ootb-task-apps]] +===== Out-of-the-Box Task Applications +The following table includes the `dataflow.spring.io` links to the task applications based on Spring Cloud Task `2.4.x` and Spring Boot `2.7.x`: [width="100%",frame="topbot",options="header"] |====================== |Maven |https://dataflow.spring.io/task-maven-latest -|https://dataflow.spring.io/Elston-BUILD-SNAPSHOT-task-applications-maven +|https://dataflow.spring.io/task-maven-latest-snapshot |Docker |https://dataflow.spring.io/task-docker-latest -|https://dataflow.spring.io/Elston-BUILD-SNAPSHOT-task-applications-docker +|https://dataflow.spring.io/task-docker-latest-snapshot |====================== -You can find more information about the available task starters in the https://cloud.spring.io/spring-cloud-task-app-starters/[Task App Starters Project Page] and -related reference documentation. For more information about the available stream starters, look at the https://cloud.spring.io/spring-cloud-stream-app-starters/[Stream App Starters Project Page] -and related reference documentation. +For more information about the available out-of-the-box stream applications, see the https://spring.io/projects/spring-cloud-stream-applications[Spring Cloud Stream Applications] project page. + +For more information about the available out-of-the-box task applications, see the https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-task[timestamp-task] and https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-batch[timestamp-batch] docs. As an example, if you would like to register all out-of-the-box stream applications built with the Kafka binder in bulk, you can use the following command:
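+A minimal example, using the Kafka + Maven link from the preceding table:
+
+====
+[source,bash]
+----
+dataflow:>app import --uri https://dataflow.spring.io/kafka-maven-latest
+----
+====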
-The plugin is necessary for creating the executable jar that is registered with Spring Cloud Data Flow. -Spring Initialzr includes the plugin in the generated POM. - +You can follow the https://dataflow.spring.io/docs/stream-developer-guides/streams/standalone-stream-sample[Stream Development] guide on the Microsite to create your own custom application. Once you have created a custom application, you can register it, as described in <>. [[spring-cloud-dataflow-create-stream]] @@ -559,11 +453,11 @@ The `stream info` command shows useful information about the stream, as shown (w [source,bash] ---- dataflow:>stream info ticktock -╔═══════════╤═════════════════╤══════════╗ -║Stream Name│Stream Definition│ Status ║ -╠═══════════╪═════════════════╪══════════╣ -║ticktock │time | log │undeployed║ -╚═══════════╧═════════════════╧══════════╝ +╔═══════════╤═════════════════╤═══════════╤══════════╗ +║Stream Name│Stream Definition│Description│ Status ║ +╠═══════════╪═════════════════╪═══════════╪══════════╣ +║ticktock │time | log │ │undeployed║ +╚═══════════╧═════════════════╧═══════════╧══════════╝ ---- ==== @@ -591,20 +485,31 @@ The following listing shows the exposed properties for the `time` application: [source,bash,options="nowrap"] ---- dataflow:> app info --name time --type source +Information about source application 'time': +Version: '3.2.1': +Default application version: 'true': +Resource URI: maven://org.springframework.cloud.stream.app:time-source-rabbit:3.2.1 ╔══════════════════════════════╤══════════════════════════════╤══════════════════════════════╤══════════════════════════════╗ ║ Option Name │ Description │ Default │ Type ║ ╠══════════════════════════════╪══════════════════════════════╪══════════════════════════════╪══════════════════════════════╣ -║trigger.time-unit │The TimeUnit to apply to delay│ │java.util.concurrent.TimeUnit ║ -║ │values. │ │ ║ -║trigger.fixed-delay │Fixed delay for periodic │1 │java.lang.Integer ║ -║ │triggers. │ │ ║ -║trigger.cron │Cron expression value for the │ │java.lang.String ║ -║ │Cron Trigger. │ │ ║ -║trigger.initial-delay │Initial delay for periodic │0 │java.lang.Integer ║ -║ │triggers. │ │ ║ -║trigger.max-messages │Maximum messages per poll, -1 │1 │java.lang.Long ║ -║ │means infinity. │ │ ║ -║trigger.date-format │Format for the date value. │ │java.lang.String ║ +║spring.integration.poller.max-│Maximum number of messages to │ │java.lang.Integer ║ +║messages-per-poll │poll per polling cycle. │ │ ║ +║spring.integration.poller.fixe│Polling rate period. Mutually │ │java.time.Duration ║ +║d-rate │exclusive with 'fixedDelay' │ │ ║ +║ │and 'cron'. │ │ ║ +║spring.integration.poller.fixe│Polling delay period. Mutually│ │java.time.Duration ║ +║d-delay │exclusive with 'cron' and │ │ ║ +║ │'fixedRate'. │ │ ║ +║spring.integration.poller.rece│How long to wait for messages │1s │java.time.Duration ║ +║ive-timeout │on poll. │ │ ║ +║spring.integration.poller.cron│Cron expression for polling. │ │java.lang.String ║ +║ │Mutually exclusive with │ │ ║ +║ │'fixedDelay' and 'fixedRate'. │ │ ║ +║spring.integration.poller.init│Polling initial delay. Applied│ │java.time.Duration ║ +║ial-delay │for 'fixedDelay' and │ │ ║ +║ │'fixedRate'; ignored for │ │ ║ +║ │'cron'. │ │ ║ +║time.date-format │Format for the date value. 
│MM/dd/yy HH:mm:ss │java.lang.String ║ ╚══════════════════════════════╧══════════════════════════════╧══════════════════════════════╧══════════════════════════════╝ ---- ==== @@ -615,6 +520,10 @@ The following listing shows the exposed properties for the `log` application: [source,bash,options="nowrap"] ---- dataflow:> app info --name log --type sink +Information about sink application 'log': +Version: '3.2.1': +Default application version: 'true': +Resource URI: maven://org.springframework.cloud.stream.app:log-sink-rabbit:3.2.1 ╔══════════════════════════════╤══════════════════════════════╤══════════════════════════════╤══════════════════════════════╗ ║ Option Name │ Description │ Default │ Type ║ ╠══════════════════════════════╪══════════════════════════════╪══════════════════════════════╪══════════════════════════════╣ @@ -853,7 +762,7 @@ In this case, Spring Cloud Data Flow states that the stream is invalid because ` To update the stream, use the `stream update` command, which takes either `--properties` or `--propertiesFile` as a command argument. Skipper has an important new top-level prefix: `version`. -The following commands deploy `http | log` stream (and the version of `log` which registered at the time of deployment was `1.1.0.RELEASE`): +The following commands deploy the `http | log` stream (the version of `log` registered at the time of deployment was `3.2.0`): ==== [source,bash] ---- @@ -871,23 +780,23 @@ Stream Deployment properties: { "log" : { "spring.cloud.deployer.indexed" : "true", "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "1.1.0.RELEASE" + "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "3.2.0" }, "http" : { "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "1.1.0.RELEASE" + "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "3.2.0" } } ---- ==== -Then the following command updates the stream to use the `1.2.0.RELEASE` version of the log application. +Then the following command updates the stream to use the `3.2.1` version of the log application.
Before updating the stream with the specific version of the application, we need to make sure that the application is registered with that version: ==== [source,bash] ---- -dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE +dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:3.2.1 Successfully registered application 'sink:log' ---- ==== @@ -897,7 +806,7 @@ Then we can update the application: ==== [source,bash] ---- -dataflow:>stream update --name httptest --properties version.log=1.2.0.RELEASE +dataflow:>stream update --name httptest --properties version.log=3.2.1 ---- ==== @@ -920,11 +829,11 @@ Stream Deployment properties: { "spring.cloud.deployer.indexed" : "true", "spring.cloud.deployer.count" : "1", "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "1.2.0.RELEASE" + "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "3.2.1" }, "http" : { "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "1.1.0.RELEASE" + "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "3.2.1" } } ---- @@ -993,7 +902,7 @@ metadata: name: log spec: resource: maven://org.springframework.cloud.stream.app:log-sink-rabbit - version: 1.2.0.RELEASE + version: 3.2.0 applicationProperties: spring.cloud.dataflow.stream.app.label: log spring.cloud.stream.bindings.input.group: httptest @@ -1013,7 +922,7 @@ metadata: name: http spec: resource: maven://org.springframework.cloud.stream.app:http-source-rabbit - version: 1.2.0.RELEASE + version: 3.2.0 applicationProperties: spring.cloud.dataflow.stream.app.label: http spring.cloud.stream.bindings.output.producer.requiredGroups: httptest @@ -1139,7 +1048,7 @@ Function composition lets you attach a functional logic dynamically to an existi == Functional Applications -With Spring Cloud Stream 3.x adding link:https://cloud.spring.io/spring-cloud-static/spring-cloud-stream/current/reference/html/spring-cloud-stream.html#spring-cloud-stream-overview-producing-consuming-messages[functional support], you can build `Source`, `Sink` and `Processor` applications merely by implementing the Java Util's `Supplier`, `Consumer`, and `Function` interfaces respectively. +With Spring Cloud Stream 3.x adding link:https://docs.spring.io/spring-cloud-stream/docs/3.2.x/reference/html/spring-cloud-stream.html#spring_cloud_function[functional support], you can build `Source`, `Sink`, and `Processor` applications merely by implementing the `java.util.function` `Supplier`, `Consumer`, and `Function` interfaces, respectively. See the link:https://dataflow.spring.io/docs/recipes/functional-apps/[Functional Application Recipe] of the SCDF site for more about this feature. [[spring-cloud-dataflow-stream-examples]] @@ -1151,8 +1060,6 @@ This chapter includes the following examples: * <> * <> -You can find links to more samples in the "`<>`" chapter.
- [[spring-cloud-dataflow-simple-stream]] === Simple Stream Processing diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc index e29edbdb4c..5614987909 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc @@ -15,13 +15,13 @@ A task application is short-lived, meaning that it stops running on purpose and One use case might be to scrape a web page and write to the database. The https://cloud.spring.io/spring-cloud-task/[Spring Cloud Task] framework is based on Spring Boot and adds the ability for Boot applications to record the lifecycle events of a short-lived application, such as when it starts, when it ends, and the exit status. -The https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/htmlsingle/#features-task-execution-details[`TaskExecution`] documentation shows which information is stored in the database. -The entry point for code execution in a Spring Cloud Task application is most often an implementation of Boot's `CommandLineRunner` interface, as shown in this https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/htmlsingle/#getting-started-writing-the-code[example]. +The https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/html#features-task-execution-details[`TaskExecution`] documentation shows which information is stored in the database. +The entry point for code execution in a Spring Cloud Task application is most often an implementation of Boot's `CommandLineRunner` interface, as shown in this https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/html#getting-started-writing-the-code[example]. The Spring Batch project is probably what comes to mind for Spring developers writing short-lived applications. Spring Batch provides a much richer set of functionality than Spring Cloud Task and is recommended when processing large volumes of data. One use case might be to read many CSV files, transform each row of data, and write each transformed row to a database. -Spring Batch provides its own database schema with a much more rich https://docs.spring.io/spring-batch/{spring-batch-doc-version}/reference/html/schema-appendix.html#metaDataSchema[set of information] about the execution of a Spring Batch job. +Spring Batch provides its own database schema with a much richer https://docs.spring.io/spring-batch/docs/{spring-batch-version}/reference/html/schema-appendix.html#metaDataSchema[set of information] about the execution of a Spring Batch job. Spring Cloud Task is integrated with Spring Batch so that, if a Spring Cloud Task application defines a Spring Batch `Job`, a link between the Spring Cloud Task and Spring Cloud Batch execution tables is created. When running Data Flow on your local machine, Tasks are launched in a separate JVM. @@ -42,7 +42,8 @@ Before you dive deeper into the details of creating Tasks, you should understand [[spring-cloud-dataflow-create-task-apps]] === Creating a Task Application -While Spring Cloud Task does provide a number of out-of-the-box applications (at https://github.com/spring-cloud-task-app-starters[spring-cloud-task-app-starters]), most task applications require custom development.
+Spring Cloud Data Flow provides a couple of out-of-the-box task applications (https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-task[timestamp-task] and https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-batch[timestamp-batch]), but most task applications require custom development. + To create a custom task application: . Use the https://start.spring.io[Spring Initializer] to create a new project, making sure to select the following starters: @@ -64,7 +65,7 @@ public class MyTask { ---- + . With this class, you need one or more `CommandLineRunner` or `ApplicationRunner` implementations within your application. You can either implement your own or use the ones provided by Spring Boot (there is one for running batch jobs, for example). -. Packaging your application with Spring Boot into an über jar is done through the standard {spring-boot-docs-reference}/html/getting-started-first-application.html#getting-started-first-application-executable-jar[Spring Boot conventions]. +. Packaging your application with Spring Boot into an über jar is done through the standard {spring-boot-docs}/#getting-started.first-application.executable-jar[Spring Boot conventions]. The packaged application can be registered and deployed as noted below. ==== Task Database Configuration @@ -287,6 +288,30 @@ NOTE: Properties configured by using this mechanism have lower precedence than t They are overridden if a property with the same key is specified at task launch time (for example, `app.trigger.prop2` overrides the common property). + +==== Launching Tasks with a Specific Application Version + +When launching a task, you can specify a specific version of the application. +If no version is specified, Spring Cloud Data Flow uses the default version of the application. +To specify the version of the application to be used at launch time, use the deployer property `version.<application name>`. +For example: + +==== +[source,bash,subs=attributes] +---- +task launch my-task --properties 'version.timestamp=3.0.0' +---- +==== + +Similarly, when scheduling a task, you use the same `version.<application name>` format. For example: + +==== +[source,bash,subs=attributes] +---- +task schedule create --name my-schedule --definitionName my-task --expression '*/1 * * * *' --properties 'version.timestamp=3.0.0' +---- +==== + + [[spring-cloud-dataflow-task-limit-concurrent-executions]] === Limit the number of concurrent task launches @@ -583,6 +608,11 @@ Establish the transaction isolation level for the Composed Task Runner. A list of available transaction isolation levels can be found https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/transaction/TransactionDefinition.html[here]. Default is `ISOLATION_REPEATABLE_READ`. +* `max-start-wait-time` +The maximum amount of time, in milliseconds, that the Composed Task Runner will wait for the +`start_time` of a step's `taskExecution` to be set before the execution of the Composed task is failed (Integer, default: 0). +Determines the maximum time each child task is allowed for application startup. The default of `0` indicates no timeout.
===== Passing Properties to the Child Tasks To set the properties for child tasks in a composed task graph at task launch time, -use the following format: `app...`. +use the following format: `app..`. The following listing shows a composed task definition as an example: ==== @@ -749,16 +779,16 @@ To have `mytaskapp` display 'HELLO' and set the `mytimestamp` timestamp format t ==== [source,bash] ---- -task launch my-composed-task --properties "app.my-composed-task.mytaskapp.displayMessage=HELLO,app.my-composed-task.mytimestamp.timestamp.format=YYYY" +task launch my-composed-task --properties "app.mytaskapp.displayMessage=HELLO,app.mytimestamp.timestamp.format=YYYY" ---- ==== -Similar to application properties, you can also set the `deployer` properties for child tasks by using the following format: `deployer...`: +Similar to application properties, you can also set the `deployer` properties for child tasks by using the following format: `deployer..`: ==== [source,bash] ---- -task launch my-composed-task --properties "deployer.my-composed-task.mytaskapp.memory=2048m,app.my-composed-task.mytimestamp.timestamp.format=HH:mm:ss" +task launch my-composed-task --properties "deployer.mytaskapp.memory=2048m,app.mytimestamp.timestamp.format=HH:mm:ss" Launched task 'a1' ---- ==== @@ -773,7 +803,7 @@ You can pass command-line arguments for the composed task runner by using the `- dataflow:>task create my-composed-task --definition "" Created new task 'my-composed-task' -dataflow:>task launch my-composed-task --arguments "--increment-instance-enabled=true --max-wait-time=50000 --split-thread-core-pool-size=4" --properties "app.my-composed-task.bbb.timestamp.format=dd/MM/yyyy HH:mm:ss" +dataflow:>task launch my-composed-task --arguments "--increment-instance-enabled=true --max-wait-time=50000 --split-thread-core-pool-size=4" --properties "app.bbb.timestamp.format=dd/MM/yyyy HH:mm:ss" Launched task 'my-composed-task' ---- ==== @@ -867,7 +897,7 @@ If `task1` fails, `task2` does not launch. You can also use the Spring Cloud Data Flow Dashboard to create your conditional execution, by using the designer to drag and drop applications that are required and connecting them together to create your directed graph, as shown in the following image: .Conditional Execution -image::{dataflow-asciidoc}/images/dataflow-ctr-conditional-execution.png[Composed Task Conditional Execution, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-conditional-execution.png[Composed Task Conditional Execution, scaledwidth="50%"] The preceding diagram is a screen capture of the directed graph as it being created by using the Spring Cloud Data Flow Dashboard. You can see that four components in the diagram comprise a conditional execution: @@ -908,7 +938,7 @@ All other statuses returned by `cat` have no effect, and the task would end norm Using the Spring Cloud Data Flow Dashboard to create the same "`basic transition`" would resemble the following image: .Basic Transition -image::{dataflow-asciidoc}/images/dataflow-ctr-transition-basic.png[Composed Task Basic Transition, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-transition-basic.png[Composed Task Basic Transition, scaledwidth="50%"] The preceding diagram is a screen capture of the directed graph as it being created in the Spring Cloud Data Flow Dashboard. Notice that there are two different types of connectors: @@ -942,7 +972,7 @@ For any exit status of `cat` other than `FAILED`, `baz` would launch. 
Using the Spring Cloud Data Flow Dashboard to create the same "`transition with wildcard`" would resemble the following image: .Basic Transition With Wildcard -image::{dataflow-asciidoc}/images/dataflow-ctr-transition-basic-wildcard.png[Composed Task Basic Transition with Wildcard, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-transition-basic-wildcard.png[Composed Task Basic Transition with Wildcard, scaledwidth="50%"] ==== Transition With a Following Conditional Execution @@ -963,11 +993,26 @@ For any exit status of `foo` other than `FAILED` or `UNKNOWN`, `qux` would launc Using the Spring Cloud Data Flow Dashboard to create the same "`transition with conditional execution`" would resemble the following image: .Transition With Conditional Execution -image::{dataflow-asciidoc}/images/dataflow-ctr-transition-conditional-execution.png[Composed Task Transition with Conditional Execution, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-transition-conditional-execution.png[Composed Task Transition with Conditional Execution, scaledwidth="50%"] NOTE: In this diagram, the dashed line (transition) connects the `foo` application to the target applications, but a solid line connects the conditional executions between `foo`, `qux`, and `quux`. +==== Ignoring Exit Message +If any child task within a split returns an `ExitMessage` other than `COMPLETED`, the split +will have an `ExitStatus` of `FAILED`. To ignore the `ExitMessage` of a child task, +add `ignoreExitMessage=true` for each app that will return an `ExitMessage` +within the split. When using this flag, the `ExitStatus` of the task will be +`COMPLETED` if the `ExitCode` of the child task is zero. The split will have an +`ExitStatus` of `FAILED` if the `ExitCode` is non-zero. There are two ways to +set the `ignoreExitMessage` flag: + +1. Set the property for each of the apps that need to have their exit message +ignored within the split. For example, in a split like `<AAA || BBB>` where `BBB` +will return an `exitMessage`, you would set the `ignoreExitMessage` property as +`app.BBB.ignoreExitMessage=true`. +2. You can also set it for all apps by using the `composed-task-arguments` property, +for example: `--composed-task-arguments=--ignoreExitMessage=true`. [[spring-cloud-data-flow-split-execution]] === Split Execution @@ -988,7 +1033,7 @@ The preceding example launches tasks `foo`, `bar` and `baz` in parallel. Using the Spring Cloud Data Flow Dashboard to create the same "`split execution`" would resemble the following image: .Split
Using the Spring Cloud Data Flow Dashboard to create the same " `split containing conditional execution` " resembles the following image: .Split with conditional execution -image::{dataflow-asciidoc}/images/dataflow-ctr-split-contains-conditional.png[Composed Task Split With Conditional Execution, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-split-contains-conditional.png[Composed Task Split With Conditional Execution, scaledwidth="50%"] ==== Establishing the Proper Thread Count for Splits @@ -1044,15 +1089,15 @@ Then `DDD` and `EEE` would run in parallel. [[spring-cloud-dataflow-launch-tasks-from-stream]] == Launching Tasks from a Stream -You can launch a task from a stream by using the https://github.com/spring-cloud-stream-app-starters/tasklauncher-dataflow/blob/master/spring-cloud-starter-stream-sink-task-launcher-dataflow/README.adoc[`task-launcher-dataflow`] sink. +You can launch a task from a stream by using the https://github.com/spring-cloud/spring-cloud-dataflow/tree/main/spring-cloud-dataflow-tasklauncher/README.adoc[`task-launcher-dataflow`] sink which is provided as a part of the Spring Cloud Data Flow project. The sink connects to a Data Flow server and uses its REST API to launch any defined task. -The sink accepts a https://github.com/spring-cloud-stream-app-starters/tasklauncher-dataflow/blob/master/spring-cloud-starter-stream-sink-task-launcher-dataflow/README.adoc#payload[JSON payload] representing a `task launch request`, which provides the name of the task to launch and may include command line arguments and deployment properties. +The sink accepts a https://github.com/spring-cloud/spring-cloud-dataflow/tree/main/spring-cloud-dataflow-tasklauncher/README.adoc#payload[JSON payload] representing a `task launch request`, which provides the name of the task to launch and may include command line arguments and deployment properties. -The https://github.com/spring-cloud-stream-app-starters/core/blob/master/common/app-starters-task-launch-request-common/README.adoc[`app-starters-task-launch-request-common`] component, in conjunction with Spring Cloud Stream https://docs.spring.io/spring-cloud-stream/docs/current-snapshot/reference/htmlsingle/#_functional_composition[functional composition], can transform the output of any source or processor to a task launch request. +The https://github.com/spring-cloud/stream-applications/tree/main/functions/function/task-launch-request-function/README.adoc[`task-launch-request-function`] component, in conjunction with Spring Cloud Stream https://docs.spring.io/spring-cloud-stream/docs/current-snapshot/reference/htmlsingle/#_functional_composition[functional composition], can transform the output of any source or processor to a task launch request. -Adding a dependency to `app-starters-task-launch-request-common` auto-configures a `java.util.function.Function` implementation, registered through https://cloud.spring.io/spring-cloud-function/[Spring Cloud Function] as a `taskLaunchRequest`. +Adding a dependency to `task-launch-request-function` auto-configures a `java.util.function.Function` implementation, registered through https://cloud.spring.io/spring-cloud-function/[Spring Cloud Function] as a `taskLaunchRequest`. -For example, you can start with the https://github.com/spring-cloud-stream-app-starters/time/tree/master/spring-cloud-starter-stream-source-time[time] source, add the following dependency, build it, and register it as a custom source. 
[[spring-cloud-dataflow-launch-tasks-from-stream]]
== Launching Tasks from a Stream

-You can launch a task from a stream by using the https://github.com/spring-cloud-stream-app-starters/tasklauncher-dataflow/blob/master/spring-cloud-starter-stream-sink-task-launcher-dataflow/README.adoc[`task-launcher-dataflow`] sink.
+You can launch a task from a stream by using the https://github.com/spring-cloud/spring-cloud-dataflow/tree/main/spring-cloud-dataflow-tasklauncher/README.adoc[`task-launcher-dataflow`] sink, which is provided as part of the Spring Cloud Data Flow project.
The sink connects to a Data Flow server and uses its REST API to launch any defined task.

-The sink accepts a https://github.com/spring-cloud-stream-app-starters/tasklauncher-dataflow/blob/master/spring-cloud-starter-stream-sink-task-launcher-dataflow/README.adoc#payload[JSON payload] representing a `task launch request`, which provides the name of the task to launch and may include command line arguments and deployment properties.
+The sink accepts a https://github.com/spring-cloud/spring-cloud-dataflow/tree/main/spring-cloud-dataflow-tasklauncher/README.adoc#payload[JSON payload] representing a `task launch request`, which provides the name of the task to launch and may include command line arguments and deployment properties.

-The https://github.com/spring-cloud-stream-app-starters/core/blob/master/common/app-starters-task-launch-request-common/README.adoc[`app-starters-task-launch-request-common`] component, in conjunction with Spring Cloud Stream https://docs.spring.io/spring-cloud-stream/docs/current-snapshot/reference/htmlsingle/#_functional_composition[functional composition], can transform the output of any source or processor to a task launch request.
+The https://github.com/spring-cloud/stream-applications/tree/main/functions/function/task-launch-request-function/README.adoc[`task-launch-request-function`] component, in conjunction with Spring Cloud Stream https://docs.spring.io/spring-cloud-stream/docs/current-snapshot/reference/htmlsingle/#_functional_composition[functional composition], can transform the output of any source or processor to a task launch request.

-Adding a dependency to `app-starters-task-launch-request-common` auto-configures a `java.util.function.Function` implementation, registered through https://cloud.spring.io/spring-cloud-function/[Spring Cloud Function] as a `taskLaunchRequest`.
+Adding a dependency on `task-launch-request-function` auto-configures a `java.util.function.Function` implementation, registered through https://cloud.spring.io/spring-cloud-function/[Spring Cloud Function] as a `taskLaunchRequest`.

-For example, you can start with the https://github.com/spring-cloud-stream-app-starters/time/tree/master/spring-cloud-starter-stream-source-time[time] source, add the following dependency, build it, and register it as a custom source.
We call it `time-tlr` in this example:
+For example, you can start with the https://github.com/spring-cloud/stream-applications/tree/main/applications/source/time-source[time] source, add the following dependency, build it, and register it as a custom source.

====
[source,xml]
----
@@ -1064,14 +1109,16 @@ For example, you can start with the https://github.com/spring-cloud-stream-app-s
----
====

-TIP: https://start-scs.cfapps.io/[Spring Cloud Stream Initializr] provides a great starting point for creating stream applications.
+To build the application, follow the instructions https://github.com/spring-cloud/stream-applications#building-stream-applications[here].
+
+This creates an `apps` directory that contains `time-source-rabbit` and `time-source-kafka` directories in the `/applications/source/time-source` directory. In each of these you will see a `target` directory that contains a `time-source-<binder>-<version>.jar`. Now register the `time-source` jar (use the jar for the appropriate binder) with SCDF as a time source named `timestamp-tlr`, as sketched below.
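The registration might look like the following shell sketch; the `file://` path and the `<version>` placeholder are illustrative, not values taken from this change:

====
[source,bash]
----
# Register the custom time source built above (path and version are placeholders).
app register --name timestamp-tlr --type source --uri file:///path/to/apps/time-source-kafka/target/time-source-kafka-<version>.jar
----
====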
-Next, <> the `task-launcher-dataflow` sink and create a task (we use the provided timestamp task):
+Next, register the `task-launcher-dataflow` sink with SCDF and create a task definition, `timestamp-task`. Once this is complete, create the stream definition shown below:

====
[source,bash]
----
-stream create --name task-every-minute --definition "time-tlr --trigger.fixed-delay=60 --spring.cloud.stream.function.definition=taskLaunchRequest --task.launch.request.task-name=timestamp-task | task-launcher-dataflow" --deploy
+stream create --name task-every-minute --definition 'timestamp-tlr --fixed-delay=60000 --task.launch.request.task-name=timestamp-task --spring.cloud.function.definition=\"timeSupplier|taskLaunchRequestFunction\" | tasklauncher-sink' --deploy
----
====

@@ -1082,7 +1129,7 @@ The following stream definition illustrates the use of command line arguments.

====
[source,bash]
----
-stream create --name task-every-second --definition "time-tlr --spring.cloud.stream.function.definition=taskLaunchRequest --task.launch.request.task-name=timestamp-task --task.launch.request.args=foo=bar --task.launch.request.arg-expressions=time=payload | task-launcher-dataflow" --deploy
+stream create --name task-every-second --definition 'timestamp-tlr --task.launch.request.task-name=timestamp-task --spring.cloud.function.definition=\"timeSupplier|taskLaunchRequestFunction\" --task.launch.request.args=foo=bar --task.launch.request.arg-expressions=time=payload | tasklauncher-sink' --deploy
----
====

@@ -1094,14 +1141,14 @@ You can then see the list of task executions by using the shell command `task execution list`:

[source,bash,options="nowrap"]
----
dataflow:>task execution list
-╔════════════════════╤══╤════════════════════════════╤════════════════════════════╤═════════╗
-║ Task Name │ID│ Start Time │ End Time │Exit Code║
-╠════════════════════╪══╪════════════════════════════╪════════════════════════════╪═════════╣
-║timestamp-task_26176│4 │Tue May 02 12:13:49 EDT 2017│Tue May 02 12:13:49 EDT 2017│0 ║
-║timestamp-task_32996│3 │Tue May 02 12:12:49 EDT 2017│Tue May 02 12:12:49 EDT 2017│0 ║
-║timestamp-task_58971│2 │Tue May 02 12:11:50 EDT 2017│Tue May 02 12:11:50 EDT 2017│0 ║
-║timestamp-task_13467│1 │Tue May 02 12:10:50 EDT 2017│Tue May 02 12:10:50 EDT 2017│0 ║
-╚════════════════════╧══╧════════════════════════════╧════════════════════════════╧═════════╝
+╔══════════════╤═══╤════════════════════════════╤════════════════════════════╤═════════╗
+║ Task Name │ID │ Start Time │ End Time │Exit Code║
+╠══════════════╪═══╪════════════════════════════╪════════════════════════════╪═════════╣
+║timestamp-task│581│Thu Sep 08 11:38:33 EDT 2022│Thu Sep 08 11:38:33 EDT 2022│0 ║
+║timestamp-task│580│Thu Sep 08 11:38:31 EDT 2022│Thu Sep 08 11:38:31 EDT 2022│0 ║
+║timestamp-task│579│Thu Sep 08 11:38:29 EDT 2022│Thu Sep 08 11:38:29 EDT 2022│0 ║
+║timestamp-task│578│Thu Sep 08 11:38:26 EDT 2022│Thu Sep 08 11:38:26 EDT 2022│0 ║
+╚══════════════╧═══╧════════════════════════════╧════════════════════════════╧═════════╝
----
====

@@ -1113,43 +1160,30 @@ This pattern may be applied to any source to launch a task in response to any event.

A composed task can be launched with the `task-launcher-dataflow` sink, as discussed <>.
Since we use the `ComposedTaskRunner` directly, we need to set up the task definitions for the composed task runner itself, along with the composed tasks, prior to the creation of the composed task launching stream.
Suppose we wanted to create the following composed task definition: `AAA && BBB`.

-The first step would be to create the task definitions, as shown in the following example:
+The first step would be to create the task definition, as shown in the following example:

====
[source]
----
-task create composed-task-runner --definition "composed-task-runner"
-task create AAA --definition "timestamp"
-task create BBB --definition "timestamp"
+task create --name composed-task-sample --definition "AAA: timestamp && BBB: timestamp"
----
====

-NOTE: Releases of `ComposedTaskRunner` can be found
-https://github.com/spring-cloud-task-app-starters/composed-task-runner/releases[here].

+Now that the composed task definition is ready, we need to create a stream that launches `composed-task-sample`.
+We create a stream with: -Now that the task definitions we need for composed task definition are ready, we need to create a stream that launches `ComposedTaskRunner`. -So, in this case, we create a stream with: - -* The `time` source customized to emit task launch requests, as shown <>. -* The `task-launcher-dataflow` sink that launches the `ComposedTaskRunner` +* The `timestamp-tlr` source customized to emit task launch requests, as shown <>. +* The `task-launcher` sink that launches the `composed-task-sample` The stream should resemble the following: ==== [source] ---- -stream create ctr-stream --definition "time --fixed-delay=30 --task.launch.request.task-name=composed-task-launcher --task.launch.request.args=--graph=AAA&&BBB,--increment-instance-enabled=true | task-launcher-dataflow" +stream create --name ctr-stream --definition "timestamp-tlr --fixed-delay=30000 --spring.cloud.function.definition=\"timeSupplier|taskLaunchRequestFunction\" --task.launch.request.task-name=composed-task-sample | tasklauncher-sink" --deploy ---- ==== -For now, we focus on the configuration that is required to launch the `ComposedTaskRunner`: - -* `graph`: This is the graph that is to be executed by the `ComposedTaskRunner`. -In this case it is `AAA&&BBB`. -* `increment-instance-enabled`: This lets each execution of `ComposedTaskRunner` be unique. -`ComposedTaskRunner` is built by using https://projects.spring.io/spring-batch/[Spring Batch]. -Thus, we want a new Job Instance for each launch of the `ComposedTaskRunner`. -To do this, we set `increment-instance-enabled` to be `true`. [[sharing-spring-cloud-dataflows-datastore-with-tasks]] == Sharing Spring Cloud Data Flow's Datastore with Tasks @@ -1222,7 +1256,7 @@ When using Kubernetes, a https://kubernetes.io/docs/concepts/workloads/controlle NOTE: Scheduled tasks do not implement the continuous deployment feature. Any changes to application version or properties for a task definition in Spring Cloud Data Flow will not affect scheduled tasks. .Architectural Overview -image::{dataflow-asciidoc}/images/dataflow-scheduling-architecture.png[Scheduler Architecture Overview, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-scheduling-architecture.png[Scheduler Architecture Overview, scaledwidth="50%"] === Enabling Scheduling @@ -1325,7 +1359,7 @@ As task applications evolve, you want to get your updates to production. This se When a task application is registered (see <>), a version is associated with it. A task application can have multiple versions associated with it, with one selected as the default. The following image illustrates an application with multiple versions associated with it (see the timestamp entry). -image::{dataflow-asciidoc}/images/dataflow-task-application-versions.png[Task Application Versions, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-task-application-versions.png[Task Application Versions, scaledwidth="50%"] Versions of an application are managed by registering multiple applications with the same name and coordinates, _except_ the version. For example, if you were to register an application with the following values, you would get one application registered with two versions (2.1.0.RELEASE and 2.1.1.RELEASE): @@ -1340,13 +1374,13 @@ Versions of an application are managed by registering multiple applications with Besides having multiple versions, Spring Cloud Data Flow needs to know which version to run on the next launch. This is indicated by setting a version to be the default version. 
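As a hedged, concrete sketch of the registration and default-selection flow described above (the Maven coordinates are illustrative, not taken from this document), the shell commands could look like this:

====
[source,bash]
----
# Register the same name and type under two versions (illustrative coordinates).
app register --name timestamp --type task --uri maven://org.springframework.cloud.task.app:timestamp-task:2.1.0.RELEASE
app register --name timestamp --type task --uri maven://org.springframework.cloud.task.app:timestamp-task:2.1.1.RELEASE

# Mark 2.1.1.RELEASE as the version used on the next launch.
app default --id task:timestamp --version 2.1.1.RELEASE
----
====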
Whatever version of a task application is configured as the default version is the one to be run on the next launch request. You can see which version is the default in the UI, as this image shows:

-image::{dataflow-asciidoc}/images/dataflow-task-default-version.png[Task Application Default Version, scaledwidth="50%"]
+image::{dataflow-asciidoc-images}/dataflow-task-default-version.png[Task Application Default Version, scaledwidth="50%"]

=== Task Launch Lifecycle
In previous versions of Spring Cloud Data Flow, when the request to launch a task was received, Spring Cloud Data Flow would deploy the application (if needed) and run it. If the application was being run on a platform that did not need to have the application deployed every time (CloudFoundry, for example), the previously deployed application was used. This flow has changed in 2.3. The following image shows what happens when a task launch request comes in now:

-image::{dataflow-asciidoc}/images/dataflow-task-launch-flow.png[Flow For Launching A Task, scaledwidth="50%"]
+image::{dataflow-asciidoc-images}/dataflow-task-launch-flow.png[Flow For Launching A Task, scaledwidth="50%"]

There are three main flows to consider in the preceding diagram. Launching the first time or launching with no changes is one. The other two are launching when there are changes but the application is not currently running and launching when there are changes and the application is running. We look at the flow with no changes first.

diff --git a/spring-cloud-dataflow-package/pom.xml b/spring-cloud-dataflow-package/pom.xml
new file mode 100644
index 0000000000..822921eb5d
--- /dev/null
+++ b/spring-cloud-dataflow-package/pom.xml
@@ -0,0 +1,52 @@
+ + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-package + spring-cloud-dataflow-package + Data Flow Package + + pom + + ${project.version} + oss + 3.4.1 + + + + + org.apache.maven.plugins + maven-resources-plugin + 3.3.1 + + + org.apache.maven.plugins + maven-assembly-plugin + 3.5.0 + + spring-cloud-dataflow-${scdf.type.version}-install-${package.version} + + + + package + + single + + + false + + src/main/assembly/zip.xml + + + + + + + +
diff --git a/spring-cloud-dataflow-package/set-package-version.sh b/spring-cloud-dataflow-package/set-package-version.sh
new file mode 100755
index 0000000000..747291d83d
--- /dev/null
+++ b/spring-cloud-dataflow-package/set-package-version.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+if [ -z "$BASH_VERSION" ]; then
+  echo "This script requires Bash.
Use: bash $0 $*" + exit 0 +fi +SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")") +MVNW=$SCDIR/../mvnw +if [ "$PACKAGE_VERSION" = "" ]; then + $MVNW help:evaluate -Dexpression=project.version -q -DforceStdout > /dev/null + PACKAGE_VERSION=$($MVNW help:evaluate -Dexpression=project.version -q -DforceStdout) + if [[ "$PACKAGE_VERSION" == *"Downloading"* ]]; then + PACKAGE_VERSION=$($MVNW help:evaluate -Dexpression=project.version -q -DforceStdout) + fi +fi +echo "PACKAGE_VERSION=$PACKAGE_VERSION" +if [[ "$PACKAGE_VERSION" != *"SNAPSHOT"* ]]; then + yq '.default.version="release"' -i "$SCDIR/../src/deploy/versions.yaml" + echo "Setting default.version=release, default.package-version=$PACKAGE_VERSION" + yq ".default.package-version=\"$PACKAGE_VERSION\"" -i "$SCDIR/../src/deploy/versions.yaml" + echo "Setting scdf-type.oss.release=$PACKAGE_VERSION" + yq ".scdf-type.oss.release=\"$PACKAGE_VERSION\"" -i "$SCDIR/../src/deploy/versions.yaml" +fi diff --git a/spring-cloud-dataflow-package/src/main/assembly/zip.xml b/spring-cloud-dataflow-package/src/main/assembly/zip.xml new file mode 100644 index 0000000000..4d85fa5f2f --- /dev/null +++ b/spring-cloud-dataflow-package/src/main/assembly/zip.xml @@ -0,0 +1,23 @@ + + distribution + false + + zip + + + + ${project.basedir}/../src/deploy + deploy + + **/*.adoc + **/*.jar + + + + ${project.basedir}/../src/kubernetes + deploy/kubernetes + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-parent/pom.xml b/spring-cloud-dataflow-parent/pom.xml new file mode 100644 index 0000000000..1719f5031f --- /dev/null +++ b/spring-cloud-dataflow-parent/pom.xml @@ -0,0 +1,692 @@ + + + 4.0.0 + org.springframework.cloud + spring-cloud-dataflow-parent + spring-cloud-dataflow-parent + Data Flow Parent + + 2.11.6-SNAPSHOT + pom + https://cloud.spring.io/spring-cloud-dataflow/ + + + 4.9.9 + 2.11.6-SNAPSHOT + 1.8 + -Xdoclint:none + 3.3.1 + 2.7.18 + 5.3.39 + 5.7.12 + 3.4.7-SNAPSHOT + ${dataflow.version} + ${dataflow.version} + 2.9.6-SNAPSHOT + 2.4.6 + ${dataflow.version} + 0.8.8 + 3.0.2 + 2.2.0 + 1.5.5 + 0.5 + 1.5.4 + + 9.0.93 + 1.78.1 + 2.9.13 + 4.1.113.Final + 2020.0.47 + 1.1.4 + 1.33 + 2.4.11 + 9.37.3 + 1.1.10.6 + 1.26.1 + 2.15.1 + 2.17.2 + 2.11.1 + 3.0.2 + 2.10.6 + 1.12.676 + 1.19.8 + + 3.2.1 + 2.3.4 + 1.0.7 + 1.0.7 + 1.6.6 + 32.1.3-jre + 1.2.13 + 2.9.0 + 42.7.2 + 4.11.0 + + + + + org.postgresql + postgresql + ${postgresql.version} + + + com.google.guava + guava + ${guava.version} + + + com.h2database + h2 + 2.2.222 + + + net.minidev + json-smart + ${json-smart.version} + + + com.nimbusds + nimbus-jose-jwt + ${nimbus-jose-jwt.version} + + + org.yaml + snakeyaml + ${snakeyaml.version} + + + org.xerial.snappy + snappy-java + ${snappy-java.version} + + + com.jayway.jsonpath + json-path + ${json-path.version} + + + com.squareup.okhttp3 + okhttp + 4.11.0 + + + com.squareup.okio + okio + 3.4.0 + + + org.codehaus.jettison + jettison + ${jettison.version} + + + + com.fasterxml.jackson + jackson-bom + ${jackson-bom.version} + pom + import + + + + ch.qos.logback + logback-core + ${logback.version} + + + ch.qos.logback + logback-classic + ${logback.version} + + + ch.qos.logback + logback-access + ${logback.version} + + + + org.apache.tomcat.embed + tomcat-embed-core + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-el + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-jasper + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-websocket + ${tomcat.version} + + + org.springframework.kafka + spring-kafka + 
${spring-kafka.version} + + + + io.netty + netty-bom + ${netty.version} + pom + import + + + + io.projectreactor + reactor-bom + ${reactor-bom.version} + pom + import + + + + io.rsocket + rsocket-bom + ${rsocket.version} + pom + import + + + + org.bouncycastle + bcprov-jdk18on + ${bouncycastle.version} + + + org.bouncycastle + bcpkix-jdk18on + ${bouncycastle.version} + + + org.bouncycastle + bcutil-jdk18on + ${bouncycastle.version} + + + + org.springframework + spring-framework-bom + ${spring-framework.version} + pom + import + + + org.springframework.security + spring-security-bom + ${spring-security.version} + pom + import + + + org.springframework.cloud + spring-cloud-task-dependencies + ${spring-cloud-task.version} + pom + import + + + org.springframework.boot + spring-boot-dependencies + ${spring-boot.version} + pom + import + + + org.springframework.cloud + spring-cloud-dataflow-build-dependencies + ${dataflow.version} + pom + import + + + org.springframework.cloud + spring-cloud-dataflow-common-dependencies + ${dataflow.version} + pom + import + + + org.springframework.cloud + spring-cloud-common-security-config-core + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-common-security-config-web + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-starter-common-security-config-web + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-starter-single-step-batch-job + ${spring-cloud-task.version} + + + org.apache.commons + commons-compress + ${commons-compress.version} + + + commons-io + commons-io + ${commons-io.version} + + + org.testcontainers + testcontainers-bom + ${testcontainers.version} + + + org.apache.commons + commons-compress + + + pom + import + + + org.springframework.cloud + spring-cloud-dataflow-ui + ${spring-cloud-dataflow-ui.version} + + + org.springframework.cloud + spring-cloud-deployer-spi + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-resource-support + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-resource-maven + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-resource-docker + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-local + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-cloudfoundry + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-kubernetes + ${spring-cloud-deployer.version} + + + org.apache.directory.server + apacheds-protocol-ldap + ${apache-directory-server.version} + + + io.codearte.props2yaml + props2yaml + ${codearte-props2yml.version} + + + net.javacrumbs.json-unit + json-unit-assertj + ${json-unit.version} + + + com.google.code.findbugs + jsr305 + ${findbugs.version} + + + joda-time + joda-time + ${joda-time.version} + + + com.amazonaws + aws-java-sdk-ecr + ${aws-java-sdk-ecr.version} + + + org.springdoc + springdoc-openapi-ui + ${springdoc-openapi-ui.version} + + + + com.wavefront + wavefront-spring-boot-bom + ${wavefront-spring-boot-bom.version} + pom + import + + + org.springframework.cloud.stream.app + stream-applications-micrometer-common + ${stream-applications.version} + + + org.springframework.cloud.stream.app + stream-applications-security-common + ${stream-applications.version} + + + org.springframework.cloud.stream.app + stream-applications-postprocessor-common + ${stream-applications.version} + + + org.springframework.cloud + 
spring-cloud-deployer-dependencies + ${spring-cloud-deployer.version} + pom + import + + + + + + repo.spring.io + Spring Release Repository + https://repo.spring.io/libs-release-local + + + repo.spring.io + Spring Snapshot Repository + https://repo.spring.io/libs-snapshot-local + + + + Pivotal Software, Inc. + https://www.spring.io + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0 + + + + https://github.com/spring-cloud/spring-cloud-dataflow-build + scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git + + + scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git + + HEAD + + + + scdf-team + Data Flow Team + https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.1.0 + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + + org.sonarsource.scanner.maven + sonar-maven-plugin + ${sonar-maven-plugin.version} + + + org.jacoco + jacoco-maven-plugin + ${jacoco-maven-plugin.version} + + + org.apache.maven.plugins + maven-jar-plugin + 3.3.0 + + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + source + + jar + + package + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + ${git-commit-id-plugin.version} + + + + revision + + + + + true + ${project.build.outputDirectory}/git.properties + full + + + + org.springframework.cloud + spring-cloud-dataflow-apps-docs-plugin + ${spring-cloud-dataflow-apps-docs-plugin.version} + + + generate-documentation + verify + + generate-documentation + + + + + + org.springframework.cloud + spring-cloud-dataflow-apps-metadata-plugin + ${spring-cloud-dataflow-apps-metadata-plugin.version} + + + org.apache.maven.plugins + maven-jxr-plugin + 3.1.1 + + + org.apache.maven.plugins + maven-surefire-plugin + 3.2.3 + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + ${java.version} + ${java.version} + + + + org.apache.maven.plugins + maven-surefire-report-plugin + 3.2.3 + + + org.apache.maven.plugins + maven-surefire-plugin + + junit-jupiter + 1 + 1 + true + + **/Abstract*.* + + + **/*Test.* + **/*Tests.* + + + + + org.jacoco + jacoco-maven-plugin + + + agent + + prepare-agent + + + + report + test + + report + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.1.0 + + + + + + + org.apache.maven.plugins + maven-jxr-plugin + 3.1.1 + + + + + + deploymentfiles + + + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + + + replace-deployment-files + process-resources + + copy-resources + + + true + ${basedir}/../src + + + ${basedir}/../src/templates + + **/* + + true + + + + + + + + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + diff --git a/spring-cloud-dataflow-platform-cloudfoundry/pom.xml b/spring-cloud-dataflow-platform-cloudfoundry/pom.xml index a19a6e7cec..f43229588c 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/pom.xml +++ b/spring-cloud-dataflow-platform-cloudfoundry/pom.xml @@ -4,10 
+4,17 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-platform-cloudfoundry + spring-cloud-dataflow-platform-cloudfoundry + Data Platform Cloud Foundry jar + + true + 3.4.1 + io.pivotal.cfenv @@ -16,6 +23,7 @@ org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework.cloud @@ -34,6 +42,11 @@ io.pivotal.cfenv java-cfenv-boot-pivotal-sso + + org.assertj + assertj-core + test + org.springframework.boot spring-boot-starter-test @@ -42,6 +55,39 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java index acc7921120..66123857f5 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,8 +16,6 @@ package org.springframework.cloud.dataflow.server.config.cloudfoundry; -import javax.annotation.PostConstruct; - import reactor.core.publisher.Hooks; import org.springframework.boot.autoconfigure.condition.ConditionalOnCloudPlatform; @@ -27,16 +25,19 @@ import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.context.event.EventListener; /** * Configuration class for customizing Cloud Foundry deployer. 
 *
 * @author Eric Bottard
+ * @author Corneil du Plessis
 */
@ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY)
-@Configuration
+@Configuration(proxyBeanMethods = false)
public class CloudFoundryDataFlowServerConfiguration {
-
+	private CloudFoundryServerConfigurationProperties cloudFoundryServerConfigurationProperties = new CloudFoundryServerConfigurationProperties();
	@Bean
	@ConfigurationProperties(prefix = CloudFoundryConnectionProperties.CLOUDFOUNDRY_PROPERTIES + ".task")
	public CloudFoundryDeploymentProperties taskDeploymentProperties() {
@@ -45,14 +46,16 @@ public CloudFoundryDeploymentProperties taskDeploymentProperties() {

	@Bean
	public CloudFoundryServerConfigurationProperties cloudFoundryServerConfigurationProperties() {
-		return new CloudFoundryServerConfigurationProperties();
+		return cloudFoundryServerConfigurationProperties;
	}

-	@PostConstruct
-	public void afterPropertiesSet() {
-		if (cloudFoundryServerConfigurationProperties().isDebugReactor()) {
+	// Handling ContextRefreshedEvent instead of @PostConstruct lets bean resolution
+	// complete first and also runs this code when the configuration is updated.
+	@EventListener(ContextRefreshedEvent.class)
+	public void handleContextRefreshedEvent() {
+		if (this.cloudFoundryServerConfigurationProperties.isDebugReactor()) {
			Hooks.onOperatorDebug();
		}
	}
-
}
diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java
index 122d47c938..5933b2c2f3 100644
--- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java
+++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java
@@ -20,11 +20,14 @@ import java.util.Map;

import org.cloudfoundry.client.CloudFoundryClient;
+import org.cloudfoundry.logcache.v1.LogCacheClient;
import org.cloudfoundry.reactor.client.ReactorCloudFoundryClient;
+import org.cloudfoundry.reactor.logcache.v1.ReactorLogCacheClient;

/**
 * @author David Turanski
- **/
+ * @author Chris Bono
+ */
public class CloudFoundryPlatformClientProvider {

	private final CloudFoundryPlatformProperties platformProperties;
@@ -35,6 +38,8 @@ public class CloudFoundryPlatformClientProvider {

	private final Map<String, CloudFoundryClient> cloudFoundryClients = new HashMap<>();

+	private final Map<String, LogCacheClient> cloudFoundryLogClients = new HashMap<>();
+
	CloudFoundryPlatformClientProvider(
		CloudFoundryPlatformProperties platformProperties,
		CloudFoundryPlatformConnectionContextProvider connectionContextProvider,
@@ -45,10 +50,16 @@ public class CloudFoundryPlatformClientProvider {
	}

	public CloudFoundryClient cloudFoundryClient(String account){
-		cloudFoundryClients.putIfAbsent(account, ReactorCloudFoundryClient.builder()
+		return cloudFoundryClients.computeIfAbsent(account, (__) -> ReactorCloudFoundryClient.builder()
+			.connectionContext(connectionContextProvider.connectionContext(account))
+			.tokenProvider(platformTokenProvider.tokenProvider(account))
+			.build());
+	}
+
+	public LogCacheClient logCacheClient(String account) {
+		return cloudFoundryLogClients.computeIfAbsent(account, (__) -> ReactorLogCacheClient.builder()
.connectionContext(connectionContextProvider.connectionContext(account)) .tokenProvider(platformTokenProvider.tokenProvider(account)) .build()); - return cloudFoundryClients.get(account); } } diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java index 576294f8dc..41da8276bf 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java @@ -35,6 +35,7 @@ import org.springframework.cloud.dataflow.core.AbstractTaskPlatformFactory; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.deployer.spi.app.AppDeployer; +import org.springframework.cloud.deployer.spi.cloudfoundry.ApplicationLogAccessor; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryAppDeployer; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; @@ -85,7 +86,8 @@ public Launcher createLauncher(String account) { cloudFoundryClient, deploymentProperties(account), cloudFoundryOperations, - runtimeEnvironmentInfo(cloudFoundryClient, account)); + runtimeEnvironmentInfo(cloudFoundryClient, account), + new ApplicationLogAccessor(this.cloudFoundryClientProvider.logCacheClient(account))); Launcher launcher = new Launcher(account, CLOUDFOUNDRY_PLATFORM_TYPE, taskLauncher, scheduler(account, taskLauncher, cloudFoundryOperations)); CloudFoundryConnectionProperties connectionProperties = connectionProperties(account); diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java index 2e98ba4c3d..9da70d6a32 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java @@ -18,23 +18,21 @@ import java.util.Map; import org.assertj.core.api.Assertions; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.context.annotation.Configuration; import org.springframework.test.context.ActiveProfiles; -import org.springframework.test.context.junit4.SpringRunner; import static org.assertj.core.api.Assertions.assertThat; /** * @author Donovan Muller * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) @SpringBootTest(classes = CloudFoundryPlatformPropertiesTests.TestConfig.class) 
@ActiveProfiles("cloudfoundry-platform-properties") public class CloudFoundryPlatformPropertiesTests { @@ -54,9 +52,9 @@ public void deserializationTest() { assertThat(cfAccounts.get("dev").getDeployment().getMemory()).isEqualTo("512m"); assertThat(cfAccounts.get("dev").getDeployment().getDisk()).isEqualTo("2048m"); assertThat(cfAccounts.get("dev").getDeployment().getInstances()).isEqualTo(4); - assertThat(cfAccounts.get("dev").getDeployment().getAppNamePrefix().equals("dev1")); + assertThat(cfAccounts.get("dev").getDeployment().getAppNamePrefix()).isEqualTo("dev1"); assertThat(cfAccounts.get("dev").getDeployment().getServices()) - .containsExactly("rabbit", "mysql"); + .containsExactlyInAnyOrder("rabbit", "mysql"); assertThat(cfAccounts.get("qa").getConnection().getOrg()).isEqualTo("myOrgQA"); assertThat(cfAccounts.get("qa").getConnection().getClientId()).isEqualTo("id2"); @@ -64,9 +62,9 @@ public void deserializationTest() { assertThat(cfAccounts.get("qa").getDeployment().getMemory()).isEqualTo("756m"); assertThat(cfAccounts.get("qa").getDeployment().getDisk()).isEqualTo("724m"); assertThat(cfAccounts.get("qa").getDeployment().getInstances()).isEqualTo(2); - assertThat(cfAccounts.get("qa").getDeployment().getAppNamePrefix().equals("qa1")); + assertThat(cfAccounts.get("qa").getDeployment().getAppNamePrefix()).isEqualTo("qa1"); assertThat(cfAccounts.get("qa").getDeployment().getServices()) - .containsExactly("rabbitQA", "mysqlQA"); + .containsExactlyInAnyOrder("rabbitQA", "mysqlQA"); } @Configuration diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java index 5f8aa5793f..3c4fb482b2 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java @@ -32,9 +32,10 @@ import org.cloudfoundry.client.v2.spaces.ListSpacesResponse; import org.cloudfoundry.client.v2.spaces.SpaceResource; import org.cloudfoundry.client.v2.spaces.Spaces; +import org.cloudfoundry.logcache.v1.LogCacheClient; import org.cloudfoundry.reactor.TokenProvider; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import reactor.core.publisher.Mono; import org.springframework.cloud.dataflow.core.Launcher; @@ -54,6 +55,7 @@ /** * @author David Turanski * @author Glenn Renfro + * @author Corneil du Plessis **/ public class CloudFoundryTaskPlatformFactoryTests { @@ -61,13 +63,13 @@ public class CloudFoundryTaskPlatformFactoryTests { private CloudFoundryPlatformConnectionContextProvider connectionContextProvider; - private CloudFoundryPlatformClientProvider cloudFoundryClientProvider = mock( - CloudFoundryPlatformClientProvider.class); + private CloudFoundryPlatformClientProvider cloudFoundryClientProvider; - private CloudFoundrySchedulerClientProvider cloudFoundrySchedulerClientProvider = mock( - CloudFoundrySchedulerClientProvider.class); + private CloudFoundrySchedulerClientProvider cloudFoundrySchedulerClientProvider; - private CloudFoundryClient cloudFoundryClient = mock(CloudFoundryClient.class); + 
private CloudFoundryClient cloudFoundryClient; + + private LogCacheClient logCacheClient; private CloudFoundryPlatformProperties cloudFoundryPlatformProperties; @@ -77,8 +79,13 @@ public class CloudFoundryTaskPlatformFactoryTests { private CloudFoundryDeploymentProperties deploymentProperties; - @Before + @BeforeEach public void setUp() throws Exception { + cloudFoundryClientProvider = mock(CloudFoundryPlatformClientProvider.class); + cloudFoundrySchedulerClientProvider = mock(CloudFoundrySchedulerClientProvider.class); + cloudFoundryClient = mock(CloudFoundryClient.class); + logCacheClient = mock(LogCacheClient.class); + when(this.cloudFoundryClient.info()) .thenReturn(getInfoRequest -> Mono.just(GetInfoResponse.builder().apiVersion("0.0.0").build())); when(this.cloudFoundryClient.organizations()).thenReturn(mock(Organizations.class)); @@ -86,6 +93,8 @@ public void setUp() throws Exception { when(this.cloudFoundryClient.organizations().list(any())).thenReturn(listOrganizationsResponse()); when(this.cloudFoundryClient.spaces().list(any())).thenReturn(listSpacesResponse()); when(this.cloudFoundryClientProvider.cloudFoundryClient(anyString())).thenReturn(this.cloudFoundryClient); + when(this.cloudFoundryClientProvider.logCacheClient(anyString())).thenReturn(this.logCacheClient); + this.cloudFoundryPlatformProperties = new CloudFoundryPlatformProperties(); this.defaultConnectionProperties = new CloudFoundryConnectionProperties(); @@ -229,7 +238,7 @@ private void setupMultiPlatform() throws Exception{ private Mono listOrganizationsResponse() { ListOrganizationsResponse response = ListOrganizationsResponse.builder() - .addAllResources(Collections.singletonList( + .addAllResources(Collections.singletonList( OrganizationResource.builder() .metadata(Metadata.builder().id("123").build()).build()) ).build(); @@ -238,7 +247,7 @@ private Mono listOrganizationsResponse() { private Mono listSpacesResponse() { ListSpacesResponse response = ListSpacesResponse.builder() - .addAllResources(Collections.singletonList( + .addAllResources(Collections.singletonList( SpaceResource.builder() .metadata(Metadata.builder().id("123").build()).build()) ).build(); diff --git a/spring-cloud-dataflow-platform-kubernetes/pom.xml b/spring-cloud-dataflow-platform-kubernetes/pom.xml index 85edc42c71..661c54260f 100644 --- a/spring-cloud-dataflow-platform-kubernetes/pom.xml +++ b/spring-cloud-dataflow-platform-kubernetes/pom.xml @@ -4,10 +4,19 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-platform-kubernetes + spring-cloud-dataflow-platform-kubernetes + Data Platform Kubernetes + jar + + true + 5.12.4 + 3.4.1 + org.springframework.cloud @@ -16,6 +25,7 @@ org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework @@ -28,6 +38,7 @@ io.fabric8 kubernetes-client + ${kubernetes-fabric8-client.version} org.springframework.boot @@ -40,4 +51,45 @@ test + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java 
b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java index 4f17da5e0e..a4b5f4ac19 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java +++ b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,8 +18,8 @@ import java.util.Map; import io.fabric8.kubernetes.client.KubernetesClient; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -36,9 +36,11 @@ /** * @author Donovan Muller + * @author Chris Bono */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = KubernetesPlatformPropertiesTests.TestConfig.class) + +@SpringBootTest(classes = KubernetesPlatformPropertiesTests.TestConfig.class, + properties = { "spring.cloud.kubernetes.client.namespace=default" }) @ActiveProfiles("kubernetes-platform-properties") public class KubernetesPlatformPropertiesTests { diff --git a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java index 47fd5b04b3..ed7104ea85 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java +++ b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java @@ -18,7 +18,7 @@ import java.util.Collections; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskPlatform; diff --git a/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml b/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml index 5094fee784..ea3fc7b20f 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml +++ b/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml @@ -8,7 +8,7 @@ spring: dev: fabric8: masterUrl: https://192.168.0.1:8443 - namespace: dev1 + namespace: dev1 imagePullPolicy: Always entryPointStyle: exec limits: diff --git a/spring-cloud-dataflow-registry/pom.xml b/spring-cloud-dataflow-registry/pom.xml index a6c4be1451..bcb3862ecc 100644 --- a/spring-cloud-dataflow-registry/pom.xml +++ b/spring-cloud-dataflow-registry/pom.xml @@ -4,11 +4,17 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-registry spring-cloud-dataflow-registry + Data Flow 
Registry jar + + true + 3.4.1 + com.fasterxml.jackson.core @@ -17,6 +23,7 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud @@ -56,7 +63,40 @@ org.springframework.cloud spring-cloud-dataflow-audit + ${project.version} compile + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java index 25a78f8c0a..5c74adea94 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java @@ -28,6 +28,7 @@ import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; +import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -49,7 +50,7 @@ public class AppRegistrationRepositoryImpl implements AppRegistrationRepositoryC private final EntityManager entityManager; @Autowired - private AppRegistrationRepository appRegistrationRepository; + private ObjectProvider appRegistrationRepository; public AppRegistrationRepositoryImpl(EntityManager entityManager) { Assert.notNull(entityManager, "Entity manager cannot be null"); @@ -84,7 +85,7 @@ public Page findAllByTypeAndNameIsLikeAndVersionAndDefaultVersi final List resultList = query.getResultList(); if (defaultVersion) { resultList.forEach(appRegistration -> { - HashSet versions = appRegistrationRepository.findAllByName(appRegistration.getName()).stream() + HashSet versions = appRegistrationRepository.getIfAvailable().findAllByName(appRegistration.getName()).stream() .filter(ar -> ar.getType() == appRegistration.getType()) .map(AppRegistration::getVersion).collect(Collectors.toCollection(HashSet::new)); appRegistration.setVersions(versions); diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java index 39d458bde0..b12198adfa 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java @@ -21,6 +21,7 @@ import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.registry.support.NoSuchAppRegistrationException; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.core.io.Resource; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -69,9 +70,10 @@ public interface AppRegistryService { * @param version Version of the AppRegistration to save 
* @param uri Resource uri of the AppRegistration to save * @param metadataUri metadata of the AppRegistration to save + * @param bootVersion Spring Boot schema version indicating Task 2, Batch 4 or Task 3, Batch 5 * @return the saved AppRegistration */ - AppRegistration save(String name, ApplicationType type, String version, URI uri, URI metadataUri); + AppRegistration save(String name, ApplicationType type, String version, URI uri, URI metadataUri, AppBootSchemaVersion bootVersion); /** * Deletes an {@link AppRegistration}. If the {@link AppRegistration} does not exist, a diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java index ae21112caa..65845f0270 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java @@ -20,12 +20,11 @@ import java.io.IOException; import java.io.InputStreamReader; import java.net.URI; -import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Properties; -import java.util.function.BiFunction; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -42,6 +41,7 @@ import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository; import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; import org.springframework.cloud.dataflow.registry.support.NoSuchAppRegistrationException; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.core.io.Resource; import org.springframework.core.io.support.PropertiesLoaderUtils; import org.springframework.data.domain.Page; @@ -71,6 +71,7 @@ * @author Oleg Zhurakousky * @author Christian Tzolov * @author Chris Schaefer + * @author Corneil du Plessis */ @Transactional public class DefaultAppRegistryService implements AppRegistryService { @@ -105,7 +106,9 @@ public AppRegistration find(String name, ApplicationType type) { @Override public AppRegistration find(String name, ApplicationType type, String version) { - return this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(name, type, version); + AppRegistration registration = this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(name, type, version); + logger.debug("find:{}:{}:{}={}", name, type, version, registration); + return registration; } @Override @@ -224,8 +227,8 @@ public Page findAll(Pageable pageable) { } @Override - public AppRegistration save(String name, ApplicationType type, String version, URI uri, URI metadataUri) { - return this.save(new AppRegistration(name, type, version, uri, metadataUri)); + public AppRegistration save(String name, ApplicationType type, String version, URI uri, URI metadataUri, AppBootSchemaVersion bootVersion) { + return this.save(new AppRegistration(name, type, version, uri, metadataUri, bootVersion)); } @Override @@ -363,24 +366,17 @@ else if (!StringUtils.hasText(uri.getSchemeSpecificPart())) { @Override public List importAll(boolean overwrite, Resource... 
resources) {
-		List<AppRegistration> registrations = new ArrayList<>();
-		Stream.of(resources)
-				// parallel takes effect if multiple resources
-				.parallel()
+		List<String[]> lines = Stream.of(resources)
				// take lines
				.flatMap(this::resourceAsLines)
				// take valid splitted lines
-				.flatMap(this::splitValidLines)
-				// reduce to AppRegistration map key'd by <type><name><version>
-				.reduce(new HashMap<String, AppRegistration>(), reduceToAppRegistrations(), (left, right) -> {
-					// combiner is used if multiple resources caused parallel stream,
-					// then just let last processed resource to override.
-					left.putAll(right);
-					return left;
-				})
-				// don't care about keys anymore
-				.values()
-				// back to stream
+				.flatMap(this::splitValidLines).collect(Collectors.toList());
+		Map<String, AppRegistration> registrations = new HashMap<>();
+		AppRegistration previous = null;
+		for (String[] line : lines) {
+			previous = createAppRegistrations(registrations, line, previous);
+		}
+		List<AppRegistration> result = registrations.values()
				.stream()
				// drop registration if it doesn't have main uri as user only had metadata
				.filter(ar -> ar.getUri() != null)
@@ -388,54 +384,62 @@ public List<AppRegistration> importAll(boolean overwrite, Resource... resources)
				.filter(ar -> isOverwrite(ar, overwrite))
				.map(ar -> {
					save(ar);
-					registrations.add(ar);
					return ar;
				}).collect(Collectors.toList());
-		return registrations;
+		return result;
	}

-	private BiFunction<HashMap<String, AppRegistration>, ? super String[], HashMap<String, AppRegistration>> reduceToAppRegistrations() {
-		return (map, lineSplit) -> {
-			String[] typeName = lineSplit[0].split("\\.");
-			if (typeName.length < 2 || typeName.length > 3) {
-				throw new IllegalArgumentException("Invalid format for app key '" + lineSplit[0]
-						+ "' in file. Must be <type>.<name> or <type>.<name>.metadata");
-			}
-			String type = typeName[0].trim();
-			String name = typeName[1].trim();
-			String version = getResourceVersion(lineSplit[1]);
-			// This is now versioned key
-			String key = type + name + version;
-			if (!map.containsKey(key) && map.containsKey(type + name + "latest")) {
-				key = type + name + "latest";
+	private AppRegistration createAppRegistrations(Map<String, AppRegistration> registrations, String[] lineSplit, AppRegistration previous) {
+		String[] typeName = lineSplit[0].split("\\.");
+		if (typeName.length < 2 || typeName.length > 3) {
+			throw new IllegalArgumentException("Invalid format for app key '" + lineSplit[0]
+					+ "' in file. Must be <type>.<name> or <type>.<name>.metadata or <type>.<name>.bootVersion");
+		}
+		String type = typeName[0].trim();
+		String name = typeName[1].trim();
+		String extra = typeName.length == 3 ? typeName[2] : null;
+		String version = "bootVersion".equals(extra) ? null : getResourceVersion(lineSplit[1]);
+		// This is now a versioned key
+		String key = type + name + version;
+		if (!registrations.containsKey(key) && registrations.containsKey(type + name + "latest")) {
+			key = type + name + "latest";
+		}
+		if ("bootVersion".equals(extra)) {
+			if (previous == null) {
+				throw new IllegalArgumentException("Expected uri for bootVersion: " + lineSplit[0]);
			}
-			AppRegistration ar = map.getOrDefault(key, new AppRegistration());
-			ar.setName(name);
-			ar.setType(ApplicationType.valueOf(type));
-			ar.setVersion(version);
-			if (typeName.length == 2) {
-				// normal app uri
-				try {
-					ar.setUri(new URI(lineSplit[1]));
-					warnOnMalformedURI(lineSplit[0], ar.getUri());
-				}
-				catch (Exception e) {
-					throw new IllegalArgumentException(e);
-				}
+			ApplicationType appType = ApplicationType.valueOf(type);
+			Assert.isTrue(appType == previous.getType() && name.equals(previous.getName()), "Expected previous to be same type and name for: " + lineSplit[0]);
+			previous.setBootVersion(AppBootSchemaVersion.fromBootVersion(lineSplit[1]));
+			return previous;
+		}
+		AppRegistration ar = registrations.getOrDefault(key, new AppRegistration());
+		ar.setName(name);
+		ar.setType(ApplicationType.valueOf(type));
+		ar.setVersion(version);
+		if (typeName.length == 2) {
+			// normal app uri
+			try {
+				ar.setUri(new URI(lineSplit[1]));
+				warnOnMalformedURI(lineSplit[0], ar.getUri());
+			} catch (Exception e) {
+				throw new IllegalArgumentException(e);
			}
-			else if (typeName.length == 3) {
+		} else if (typeName.length == 3) {
+			if (extra.equals("metadata")) {
				// metadata app uri
				try {
					ar.setMetadataUri(new URI(lineSplit[1]));
					warnOnMalformedURI(lineSplit[0], ar.getMetadataUri());
-				}
-				catch (Exception e) {
+				} catch (Exception e) {
					throw new IllegalArgumentException(e);
				}
+			} else if (!"bootVersion".equals(extra)) {
+				throw new IllegalArgumentException("Invalid property: " + lineSplit[0]);
			}
-			map.put(key, ar);
-			return map;
-		};
+		}
+		registrations.put(key, ar);
+		return ar;
	}

	private Stream<String> resourceAsLines(Resource resource) {
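To make the new `bootVersion` key concrete: based on the parser above, a registration properties file consumed by `importAll` can now carry a `<type>.<name>.bootVersion` entry, and that entry must directly follow the line that registers the app's URI, since it is applied to the previous registration. A hedged sketch (coordinates are illustrative; the schema values `2` and `3` follow the `save(...)` javadoc above):

====
[source,bash]
----
# Illustrative registration file; key format <type>.<name>[.metadata|.bootVersion].
cat > /tmp/apps.properties <<'EOF'
task.timestamp=maven://org.springframework.cloud.task.app:timestamp-task:2.1.1.RELEASE
task.timestamp.bootVersion=2
EOF
----
====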
diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java
index f2bd076d42..487e1927c1 100644
--- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java
+++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java
@@ -21,9 +21,10 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.stream.Collectors;

-import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.assertj.core.api.Condition;
+import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;

import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService;
@@ -31,6 +32,7 @@ import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository;
import org.springframework.cloud.dataflow.registry.support.AppResourceCommon;
+import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
import org.springframework.cloud.deployer.resource.maven.MavenProperties;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.DefaultResourceLoader;
@@ -40,16 +42,14 @@
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;

-import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.hasProperty;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
@@ -64,20 +64,21 @@
 * @author Chris Schaefer
 * @author Ilayaperumal Gopinathan
 * @author David Turanski
+ * @author Corneil du Plessis
 */
public class DefaultAppRegistryServiceTests {

-	private AppRegistrationRepository appRegistrationRepository = mock(AppRegistrationRepository.class);
+	private final AppRegistrationRepository appRegistrationRepository = mock(AppRegistrationRepository.class);

-	private ResourceLoader resourceLoader = new DefaultResourceLoader();
+	private final ResourceLoader resourceLoader = new DefaultResourceLoader();

-	private AppRegistryService appRegistryService = new DefaultAppRegistryService(appRegistrationRepository,
+	private final AppRegistryService appRegistryService = new DefaultAppRegistryService(appRegistrationRepository,
			new AppResourceCommon(new MavenProperties(), resourceLoader), mock(DefaultAuditRecordService.class));

	@Test
	public void testNotFound() {
		AppRegistration registration = appRegistryService.find("foo", ApplicationType.source);
-		assertThat(registration, Matchers.nullValue());
+		assertThat(registration).isNull();
	}

	@Test
@@ -87,8 +88,8 @@ public void testFound() {
		eq(registration.getName()), eq(registration.getType()))).thenReturn(registration);

		AppRegistration registration2 = appRegistryService.find("foo", ApplicationType.source);
-		assertThat(registration2.getName(), is("foo"));
-		assertThat(registration2.getType(), is(ApplicationType.source));
+		assertThat(registration2.getName()).isEqualTo("foo");
+		assertThat(registration2.getType()).isEqualTo(ApplicationType.source);
	}

	@Test
@@ -100,7 +101,7 @@ public void testMetadataResourceResolvesWhenAvailable() {
		AppRegistration registration2 = appRegistryService.find("foo", ApplicationType.source);
		Resource appMetadataResource = appRegistryService.getAppMetadataResource(registration2);

-		assertThat(appMetadataResource.getFilename(), is("foo-source-metadata"));
+		assertThat(appMetadataResource.getFilename()).isEqualTo("foo-source-metadata");
	}

	@Test
@@ -113,7 +114,7 @@ public void testMetadataResourceNotAvailableResolvesToMainResource() {
		AppRegistration registration2 = appRegistryService.find("foo", ApplicationType.source);
		Resource appMetadataResource = appRegistryService.getAppMetadataResource(registration2);

-		assertThat(appMetadataResource.getFilename(), is("foo-source"));
+		assertThat(appMetadataResource.getFilename()).isEqualTo("foo-source");
	}

	@Test
@@ -125,24 +126,16 @@ public void testFindAll() {

		List<AppRegistration> registrations = appRegistryService.findAll();

-
assertThat(registrations, containsInAnyOrder( - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("classpath:/foo-source"))), - hasProperty("metadataUri", is(URI.create("classpath:/foo-source-metadata"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("bar")), - hasProperty("uri", is(URI.create("classpath:/bar-source"))), - hasProperty("metadataUri", is(URI.create("classpath:/bar-source-metadata"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("classpath:/foo-sink"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("classpath:/foo-source"), URI.create("classpath:/foo-source-metadata"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("bar", URI.create("classpath:/bar-source"), URI.create("classpath:/bar-source-metadata"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("classpath:/foo-sink"), null, ApplicationType.sink)); } + static Condition<AppRegistration> appRegistrationWith(String name, URI uri, URI metadata, ApplicationType type) { + return metadata != null ? + new Condition<>(item -> name.equals(item.getName()) && uri.equals(item.getUri()) && metadata.equals(item.getMetadataUri()) && type.equals(item.getType()), "AppRegistrationWith") : + new Condition<>(item -> name.equals(item.getName()) && uri.equals(item.getUri()) && item.getMetadataUri() == null && type.equals(item.getType()), "AppRegistrationWith"); + } @Test public void testFindAllPageable() { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); @@ -161,7 +154,7 @@ public void testFindAllPageable() { PageRequest pageRequest2 = PageRequest.of(1, 2); when(appRegistrationRepository.findAll(eq(pageRequest2))) - .thenReturn(new PageImpl(Arrays.asList(fooSource), pageRequest2, 3)); + .thenReturn(new PageImpl(Collections.singletonList(fooSource), pageRequest2, 3)); Page<AppRegistration> registrations2 = appRegistryService.findAll(pageRequest2); assertEquals(3, registrations2.getTotalElements()); @@ -207,7 +200,7 @@ public void testSaveExistingApp() { when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion( eq(fooSource2.getName()), eq(fooSource2.getType()), eq(fooSource2.getVersion()))) - .thenReturn(fooSource2); + .thenReturn(fooSource2); appRegistryService.save(fooSource); @@ -226,8 +219,7 @@ public void testImportAllOverwrite() { eq("foo"), eq(ApplicationType.source), eq("1.0"))).thenReturn(appRegistration()); when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion( eq("bar"), eq(ApplicationType.sink), eq("1.0"))).thenReturn(appRegistration()); - assertThat(appRegistryService.importAll(false, - new ClassPathResource("AppRegistryTests-importAllOverwrite.properties", getClass())).size(), equalTo(0)); + assertThat(appRegistryService.importAll(false, new ClassPathResource("AppRegistryTests-importAllOverwrite.properties", getClass()))).isEmpty(); } @Test @@ -238,12 +230,10 @@ public void testImportRealWorldJarsWithMetadata() { verify(appRegistrationRepository, times(1)).save(appRegistrationCaptor.capture()); List<AppRegistration> registrations = appRegistrationCaptor.getAllValues(); AppRegistration appRegistration = registrations.get(0); - assertThat(appRegistration, hasProperty("name", is("cassandra"))); - assertThat(appRegistration, hasProperty("uri", - is(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE.jar")))); - assertThat(appRegistration, hasProperty("metadataUri", - is(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE-metadata.jar")))); - assertThat(appRegistration, hasProperty("type", is(ApplicationType.sink))); + assertThat(appRegistration.getName()).isEqualTo("cassandra"); + assertThat(appRegistration.getUri()).isEqualTo(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE.jar")); + assertThat(appRegistration.getMetadataUri()).isEqualTo(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE-metadata.jar")); + assertThat(appRegistration.getType()).isEqualTo(ApplicationType.sink); }
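A note on the assertion style used throughout this file: the Hamcrest containsInAnyOrder chains are replaced by AssertJ Conditions, which carry a description that shows up in failure output. Be aware that haveAtLeastOne only checks presence, not the exact set, so it is slightly weaker than containsInAnyOrder. A minimal sketch of the pattern (reusing the appRegistrationWith helper introduced above):

    List<AppRegistration> registrations = appRegistryService.findAll();
    assertThat(registrations).haveAtLeastOne(
            appRegistrationWith("foo", URI.create("classpath:/foo-source"),
                    URI.create("classpath:/foo-source-metadata"), ApplicationType.source));
    // An inline Condition works the same way; the description replaces the lambda in failure messages:
    assertThat(registrations).haveAtLeastOne(
            new Condition<>(r -> r.getType() == ApplicationType.source, "a source registration"));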
@Test @@ -262,18 +252,8 @@ public void testImportAll() { List<AppRegistration> registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("bar")), - hasProperty("uri", is(URI.create("http:/bar-source-1.0.0"))), - hasProperty("metadataUri", is(URI.create("http:/bar-source-metadata-1.0.0"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("http:/foo-sink-1.0.0"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("bar", URI.create("http:/bar-source-1.0.0"), URI.create("http:/bar-source-metadata-1.0.0"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("http:/foo-sink-1.0.0"), null, ApplicationType.sink)); // // Now import with overwrite = true // @@ -287,23 +267,9 @@ registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("http:/foo-source-1.0.0"))), - hasProperty("metadataUri", is(URI.create("http:/foo-source-metadata-1.0.0"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("bar")), - hasProperty("uri", is(URI.create("http:/bar-source-1.0.0"))), - hasProperty("metadataUri", is(URI.create("http:/bar-source-metadata-1.0.0"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("http:/foo-sink-1.0.0"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("http:/foo-source-1.0.0"), URI.create("http:/foo-source-metadata-1.0.0"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("bar", URI.create("http:/bar-source-1.0.0"), URI.create("http:/bar-source-metadata-1.0.0"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("http:/foo-sink-1.0.0"), null, ApplicationType.sink)); } @Test @@ -322,29 +288,10 @@ public void testImportMixedVersions() { verify(appRegistrationRepository, times(4)).save(appRegistrationCaptor.capture()); List<AppRegistration> registrations =
appRegistrationCaptor.getAllValues(); - - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"), ApplicationType.sink)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.sink)); } @Test @@ -365,28 +312,10 @@ public void testImportMixedVersionsMultiFile() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", 
is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"), ApplicationType.sink)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.sink)); } @Test @@ -406,28 +335,10 @@ public void testImportMixedVersionsWithSpaceAndComments() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + 
assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"), ApplicationType.sink)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.sink)); } @Test @@ -447,28 +358,10 @@ public void testImportMixedVersionsWithMixedOrder() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), 
URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"), ApplicationType.sink)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.sink)); } @Test @@ -487,24 +380,9 @@ public void testImportMixedVersionsWithMissingAndOnlyMetadata() { verify(appRegistrationRepository, times(3)).save(appRegistrationCaptor.capture()); List registrations = appRegistrationCaptor.getAllValues(); - - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"), ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("time", URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), null, ApplicationType.source)); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("log", URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"), ApplicationType.sink)); } @Test @@ -518,18 +396,8 @@ public void testImportAllDockerLatest() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("docker:springcloudstream/foo-source-kafka:latest"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:foo-source-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("docker:springcloudstream/foo-sink-kafka:latest"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:foo-sink-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("docker:springcloudstream/foo-source-kafka:latest"), URI.create("maven://org.springframework.cloud.stream.app:foo-source-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"), ApplicationType.source)); + 
assertThat(registrations).haveAtLeastOne(appRegistrationWith("foo", URI.create("docker:springcloudstream/foo-sink-kafka:latest"), URI.create("maven://org.springframework.cloud.stream.app:foo-sink-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"), ApplicationType.sink)); } @Test @@ -548,6 +416,49 @@ public void testDeleteAll() throws URISyntaxException { verify(appRegistrationRepository, times(1)).deleteAll(appsToDelete); } + @Test + public void testMultipleBootVersions() { + // given + Resource resource = new ClassPathResource("AppRegistryTests-importMultipleBootVersions.properties", getClass()); + // when + List<AppRegistration> result = appRegistryService.importAll(false, resource); + // then + List<AppRegistration> boot2 = result.stream().filter(r -> r.getBootVersion().equals(AppBootSchemaVersion.BOOT2)).collect(Collectors.toList()); + List<AppRegistration> boot3 = result.stream().filter(r -> r.getBootVersion().equals(AppBootSchemaVersion.BOOT3)).collect(Collectors.toList()); + assertEquals(1L, boot2.size()); + assertEquals(1L, boot3.size()); + assertEquals("2.0.1", boot2.get(0).getVersion()); + assertEquals("3.0.0", boot3.get(0).getVersion()); + } + @Test + public void testMultipleBootVersionsExpectError() { + // given + Resource resource = new ClassPathResource("AppRegistryTests-importInvalidBootVersions.properties", getClass()); + // when + try { + appRegistryService.importAll(false, resource); + fail("Expected Exception"); + } catch (IllegalArgumentException x) { + // then + assertTrue(x.toString().contains("Invalid")); + } + } + @Test + public void testBootVersionsMissingURI() { + // given + Resource resource = new ClassPathResource("AppRegistryTests-importBootVersionsMissingURI.properties", getClass()); + // when + try { + appRegistryService.importAll(false, resource); + fail("Expected Exception"); + } catch (IllegalArgumentException x) { + // then + assertNotNull(x.getMessage()); + System.out.println("Exception:" + x.getMessage()); + assertTrue(x.getMessage().startsWith("Expected uri for bootVersion") || x.getMessage().startsWith("Expected previous to be same type and name for")); + } + } + private AppRegistration appRegistration() { return appRegistration("foo", ApplicationType.source, true); } diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java index 70d1c136b7..7a00417b15 100644 --- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java +++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2019 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
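The try/fail/catch blocks in the new bootVersion tests above could also use the AssertJ exception style that the conversions in this file adopt; a sketch against the same fixture name:

    assertThatThrownBy(() -> appRegistryService.importAll(false,
            new ClassPathResource("AppRegistryTests-importInvalidBootVersions.properties", getClass())))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageContaining("Invalid");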
@@ -18,7 +18,7 @@ import java.net.MalformedURLException; import java.net.URI; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.deployer.resource.docker.DockerResource; import org.springframework.cloud.deployer.resource.maven.MavenProperties; @@ -29,8 +29,8 @@ import org.springframework.core.io.UrlResource; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -45,177 +45,158 @@ public class AppResourceCommonTests { private ResourceLoader resourceLoader = mock(ResourceLoader.class); private AppResourceCommon appResourceCommon = new AppResourceCommon(new MavenProperties(), resourceLoader); - @Test(expected = IllegalArgumentException.class) + @Test public void testBadNamedJars() throws Exception { - UrlResource urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit.jar"); - appResourceCommon.getUrlResourceVersion(urlResource); + UrlResource urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/3.2.1/file-sink-rabbit.jar"); + assertThatIllegalArgumentException().isThrownBy( () -> appResourceCommon.getUrlResourceVersion(urlResource)); } @Test public void testInvalidUrlResourceWithoutVersion() throws Exception { assertThat(appResourceCommon.getUrlResourceWithoutVersion( - new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit-1.2.0.RELEASE.jar"))) - .isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit"); + new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/3.2.1/file-sink-rabbit-3.2.1.jar"))) + .isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/3.2.1/file-sink-rabbit"); } @Test public void testInvalidURIPath() throws Exception { UrlResource urlResource = new UrlResource("https://com.com-0.0.2-SNAPSHOT"); - try { - appResourceCommon.getUrlResourceVersion(urlResource); - fail("Excepted IllegalArgumentException for an invalid URI path"); - } - catch (Exception e) { - assertThat(e.getMessage().equals("URI path doesn't exist")); - } + assertThatThrownBy(() -> appResourceCommon.getUrlResourceVersion(urlResource)) + .hasMessage("URI path doesn't exist"); } @Test public void testInvalidUriSchema() { - try { - appResourceCommon.getResource("springcloud/polyglot-python-processor:0.1"); - fail("Excepted IllegalArgumentException for an invalid URI schema prefix"); - } - catch (IllegalArgumentException iae) { - assertThat(iae.getMessage().equals("Invalid URI schema for resource: " + + assertThatIllegalArgumentException().isThrownBy(() -> + appResourceCommon.getResource("springcloud/polyglot-python-processor:0.1")) + .withMessage("Invalid URI schema for resource: " + "springcloud/polyglot-python-processor:0.1 Expected URI schema prefix like file://, " + - "http:// or classpath:// but got none")); - } + "http:// or classpath:// but got none"); } @Test public void 
testDefaultResource() { String classpathUri = "classpath:AppRegistryTests-importAll.properties"; Resource resource = appResourceCommon.getResource(classpathUri); - assertTrue(resource instanceof ClassPathResource); + assertThat(resource instanceof ClassPathResource).isTrue(); } @Test public void testDockerUriString() throws Exception { - String dockerUri = "docker:springcloudstream/log-sink-rabbit:1.2.0.RELEASE"; + String dockerUri = "docker:springcloudstream/log-sink-rabbit:3.2.1"; Resource resource = appResourceCommon.getResource(dockerUri); - assertTrue(resource instanceof DockerResource); + assertThat(resource instanceof DockerResource).isTrue(); assertThat(resource.getURI().toString().equals(dockerUri)); } @Test public void testJarMetadataUriDockerApp() throws Exception { - String appUri = "docker:springcloudstream/log-sink-rabbit:1.2.0.RELEASE"; - String metadataUri = "https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit-1.2.0.RELEASE.jar"; - Resource metadataResource = appResourceCommon.getMetadataResource(new URI(appUri), new URI(metadataUri)); + String appUri = "docker:springcloudstream/log-sink-rabbit:3.2.1"; + String metadataUri = "https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/3.2.1/file-sink-rabbit-3.2.1.jar"; + appResourceCommon.getMetadataResource(new URI(appUri), new URI(metadataUri)); verify(resourceLoader).getResource(eq(metadataUri)); } @Test public void testMetadataUriHttpApp() throws Exception { - String appUri = "https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit-1.2.0.RELEASE.jar"; + String appUri = "https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/3.2.1/file-sink-rabbit-3.2.1.jar"; Resource metadataResource = appResourceCommon.getMetadataResource(new URI(appUri), null); - assertTrue(metadataResource instanceof UrlResource); + assertThat(metadataResource instanceof UrlResource).isTrue(); assertThat(metadataResource.getURI().toString().equals(appUri)); } @Test public void testMetadataUriDockerApp() throws Exception { - String appUri = "docker:springcloudstream/log-sink-rabbit:1.2.0.RELEASE"; + String appUri = "docker:springcloudstream/log-sink-rabbit:3.2.1"; Resource metadataResource = appResourceCommon.getMetadataResource(new URI(appUri), null); assertThat(metadataResource).isNotNull(); - assertTrue(metadataResource instanceof DockerResource); + assertThat(metadataResource instanceof DockerResource).isTrue(); } @Test public void testResourceURIWithMissingFileNameExtension() throws Exception { UrlResource urlResource = new UrlResource("https://com.com-0.0.2-SNAPSHOT/test"); - try { - appResourceCommon.getUrlResourceVersion(urlResource); - fail("Excepted IllegalArgumentException for an invalid URI path"); - } - catch (Exception e) { - assertThat(e.getMessage().equals("URI file name extension doesn't exist")); - } + assertThatThrownBy(() -> appResourceCommon.getUrlResourceVersion(urlResource)) + .hasMessage("URI file name extension doesn't exist"); } @Test public void testInvalidUrlResourceURI() throws Exception { UrlResource urlResource = new UrlResource("https://com.com-0.0.2-SNAPSHOT/test.zip"); - try { - appResourceCommon.getUrlResourceVersion(urlResource); - fail("Excepted IllegalArgumentException for an invalid URL resource URI"); - } - catch (Exception e) { - assertThat(e.getMessage().equals("Could not parse version from 
https://com.com-0.0.2-SNAPSHOT/test.zip, expected format is -.jar")); - } + assertThatThrownBy(() -> appResourceCommon.getUrlResourceVersion(urlResource)) + .hasMessageStartingWith("Could not parse version from https://com.com-0.0.2-SNAPSHOT/test.zip, expected format is -.jar"); } @Test public void testJars() throws MalformedURLException { //Dashes in artifact name - UrlResource urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-1.2.0.RELEASE.jar"); + UrlResource urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-3.2.1.jar"); String version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.RELEASE"); + assertThat(version).isEqualTo("3.2.1"); String theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); //No dashes in artfiact name - BUILD-SNAPSHOT - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file-1.2.0.BUILD-SNAPSHOT.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file-3.2.1-SNAPSHOT.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.BUILD-SNAPSHOT"); + assertThat(version).isEqualTo("3.2.1-SNAPSHOT"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file"); //No dashes in artfiact name - RELEASE - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file-1.2.0.RELEASE.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file-3.2.1.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.RELEASE"); + assertThat(version).isEqualTo("3.2.1"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file"); //Spring style snapshots naming scheme - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-1.2.0.BUILD-SNAPSHOT.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-3.2.1-SNAPSHOT.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.BUILD-SNAPSHOT"); + assertThat(version).isEqualTo("3.2.1-SNAPSHOT"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - 
assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); //Standard maven style naming scheme - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-1.2.0-SNAPSHOT.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-3.2.1-SNAPSHOT.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0-SNAPSHOT"); + assertThat(version).isEqualTo("3.2.1-SNAPSHOT"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); } @Test public void testGetResourceWithoutVersion() { assertThat(appResourceCommon.getResourceWithoutVersion( - MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:war:exec:1.3.0.RELEASE"))) + MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:war:exec:3.2.1"))) .isEqualTo("maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:war:exec"); assertThat(appResourceCommon.getResourceWithoutVersion( - MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit::exec:1.3.0.RELEASE"))) + MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit::exec:3.2.1"))) .isEqualTo("maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:jar:exec"); assertThat(appResourceCommon.getResourceWithoutVersion( - MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:1.3.0.RELEASE"))) + MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:3.2.1"))) .isEqualTo("maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:jar"); } @Test public void testGetResource() { - String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:1.3.0.RELEASE"; + String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:3.2.1"; Resource resource = appResourceCommon.getResource(mavenUri); assertThat(resource).isInstanceOf(MavenResource.class); } @Test public void testGetResourceVersion() { - String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:1.3.0.RELEASE"; + String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:3.2.1"; String version = appResourceCommon.getResourceVersion(appResourceCommon.getResource(mavenUri)); - assertThat(version).isEqualTo("1.3.0.RELEASE"); + assertThat(version).isEqualTo("3.2.1"); } @Test public void testGetMetadataResourceVersion() { - String httpUri = "http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.BUILD-SNAPSHOT/cassandra-sink-rabbit-2.1.0.BUILD-SNAPSHOT-metadata.jar"; + String httpUri = 
"http://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/cassandra-sink-rabbit/3.2.1-SNAPSHOT/cassandra-sink-rabbit-3.2.1-SNAPSHOT-metadata.jar"; String version = appResourceCommon.getResourceVersion(appResourceCommon.getResource(httpUri)); - assertThat(version).isEqualTo("2.1.0.BUILD-SNAPSHOT"); + assertThat(version).isEqualTo("3.2.1-SNAPSHOT"); } } diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java index 5b12270c61..35f903fec4 100644 --- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java +++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java @@ -17,118 +17,77 @@ /** * @author Mark Pollack + * @author Corneil du Plessis */ import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.stream.Stream; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; +import org.junit.jupiter.params.provider.MethodSource; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.params.provider.Arguments.arguments; /** * Test for DockerImage parsing methods Code from https://github.com/vmware/admiral */ -@RunWith(Parameterized.class) -public class DockerImageTests { - private final String description; - - private final String fullImageName; - - private final String expectedHost; - - private final String expectedNamespace; - - private final String expectedRepo; - - private final String expectedNamespaceAndRepo; - - private final String expectedTag; - - /** - * @param expectedHost - * @param expectedNamespace - * @param expectedRepo - */ - public DockerImageTests(String description, String fullImageName, String expectedHost, - String expectedNamespace, - String expectedRepo, - String expectedNamespaceAndRepo, - String expectedTag) { - - this.description = description; - this.fullImageName = fullImageName; - this.expectedHost = expectedHost; - this.expectedNamespace = expectedNamespace; - this.expectedRepo = expectedRepo; - this.expectedNamespaceAndRepo = expectedNamespaceAndRepo; - this.expectedTag = expectedTag; - } - - @Parameterized.Parameters - public static List data() { - List data = new ArrayList<>(); - data.add(new String[] { "all sections", "myhost:300/namespace/repo:tag", "myhost:300", - "namespace", "repo", "namespace/repo", "tag" }); - - data.add(new String[] { "repo and tag", "repo:tag", null, null, "repo", "library/repo", - "tag" }); - data.add(new String[] { "implicit registry, repo and tag", "library/repo:tag", null, - "library", "repo", "library/repo", "tag" }); - - data.add(new String[] { "repo without tag", "repo", null, null, "repo", "library/repo", - "latest" }); - - data.add(new String[] { "namespace and repo", "namespace/repo", null, "namespace", "repo", - "namespace/repo", "latest" }); - - data.add(new String[] { "host with dot and repo", "host.name/repo", 
"host.name", null, - "repo", "repo", "latest" }); - - data.add(new String[] { "host with colon and repo", "host:3000/repo", "host:3000", null, - "repo", "repo", "latest" }); - - data.add(new String[] { "host with colon, repo and tag", "host:3000/repo:tag", "host:3000", - null, "repo", "repo", "tag" }); - - data.add(new String[] { "official repo with default namespace", - "registry.hub.docker.com/library/repo:tag", "registry.hub.docker.com", "library", - "repo", "library/repo", "tag" }); - - data.add(new String[] { "official repo with custom namespace", - "registry.hub.docker.com/user/repo:tag", "registry.hub.docker.com", "user", "repo", - "user/repo", "tag" }); - - data.add(new String[] { "official repo with default namespace", - "docker.io/library/repo:tag", "docker.io", "library", "repo", "library/repo", - "tag" }); - - data.add(new String[] { "official repo with custom namespace", - "docker.io/user/repo:tag", "docker.io", "user", "repo", "user/repo", "tag" }); - - data.add(new String[] { "host and three path components of repo", - "host/namespace/category/repo", "host", "namespace/category", "repo", - "namespace/category/repo", "latest" }); +public class DockerImageTests { - data.add(new String[] { "host, port, three path components of repo and tag", - "host:5000/namespace/category/repo:tag", "host:5000", "namespace/category", "repo", - "namespace/category/repo", "tag" }); - return data; + static class DockerImageNames implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext extensionContext) throws Exception { + List data = new ArrayList<>(); + data.add(arguments("all sections", "myhost:300/namespace/repo:tag", "myhost:300", "namespace", "repo", + "namespace/repo", "tag")); + data.add(arguments("repo and tag", "repo:tag", null, null, "repo", "library/repo", "tag")); + data.add(arguments("implicit registry, repo and tag", "library/repo:tag", null, "library", "repo", + "library/repo", "tag")); + data.add(arguments("repo without tag", "repo", null, null, "repo", "library/repo", "latest")); + data.add(arguments("namespace and repo", "namespace/repo", null, "namespace", "repo", "namespace/repo", + "latest")); + data.add(arguments("host with dot and repo", "host.name/repo", "host.name", null, "repo", "repo", + "latest")); + data.add(arguments("host with colon and repo", "host:3000/repo", "host:3000", null, "repo", "repo", + "latest")); + data.add(arguments("host with colon, repo and tag", "host:3000/repo:tag", "host:3000", null, "repo", + "repo", "tag")); + data.add(arguments("official repo with default namespace", "registry.hub.docker.com/library/repo:tag", + "registry.hub.docker.com", "library", "repo", "library/repo", "tag")); + data.add(arguments("official repo with custom namespace", "registry.hub.docker.com/user/repo:tag", + "registry.hub.docker.com", "user", "repo", "user/repo", "tag")); + data.add(arguments("official repo with default namespace", "docker.io/library/repo:tag", "docker.io", + "library", "repo", "library/repo", "tag")); + data.add(arguments("official repo with custom namespace", "docker.io/user/repo:tag", "docker.io", "user", + "repo", "user/repo", "tag")); + data.add(arguments("host and three path components of repo", "host/namespace/category/repo", "host", + "namespace/category", "repo", "namespace/category/repo", "latest")); + data.add(arguments("host, port, three path components of repo and tag", + "host:5000/namespace/category/repo:tag", "host:5000", "namespace/category", "repo", + "namespace/category/repo", "tag")); + + return 
data.stream(); + } } - @Test - public void testDockerImageParsing() { + @ParameterizedTest + @ArgumentsSource(DockerImageNames.class) + public void testDockerImageParsing(String description, String fullImageName, String expectedHost, String expectedNamespace, String expectedRepo, String expectedNamespaceAndRepo, String expectedTag) { DockerImage dockerImage = DockerImage.fromImageName(fullImageName); - assertEquals(description + ": host", expectedHost, dockerImage.getHost()); - assertEquals(description + ": namespace", expectedNamespace, dockerImage.getNamespace()); - assertEquals(description + ": repository", expectedRepo, dockerImage.getRepository()); - assertEquals(description + ": namespace and repo", expectedNamespaceAndRepo, - dockerImage.getNamespaceAndRepo()); - assertEquals(description + ": tag", expectedTag, dockerImage.getTag()); + assertEquals( expectedHost, dockerImage.getHost(), description + ": host"); + assertEquals(expectedNamespace, dockerImage.getNamespace(), description + ": namespace"); + assertEquals(expectedRepo, dockerImage.getRepository(), description + ": repository"); + assertEquals(expectedNamespaceAndRepo, dockerImage.getNamespaceAndRepo(), description + ": namespace and repo"); + assertEquals(expectedTag, dockerImage.getTag(), description + ": tag"); } } diff --git a/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importBootVersionsMissingURI.properties b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importBootVersionsMissingURI.properties new file mode 100644 index 0000000000..282c73d19c --- /dev/null +++ b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importBootVersionsMissingURI.properties @@ -0,0 +1,6 @@ +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:3.0.0 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:3.0.0 +source.time.bootVersion=3 +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1 +source.timestamp.bootVersion=3 diff --git a/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importInvalidBootVersions.properties b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importInvalidBootVersions.properties new file mode 100644 index 0000000000..213c0737d7 --- /dev/null +++ b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importInvalidBootVersions.properties @@ -0,0 +1,5 @@ +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:3.0.0 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:3.0.0 +source.time.bootVersion=3.0 +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1 diff --git a/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importMultipleBootVersions.properties 
b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importMultipleBootVersions.properties new file mode 100644 index 0000000000..5ef4e7c479 --- /dev/null +++ b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importMultipleBootVersions.properties @@ -0,0 +1,5 @@ +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:3.0.0 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:3.0.0 +source.time.bootVersion=3 +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1 diff --git a/spring-cloud-dataflow-rest-client/pom.xml b/spring-cloud-dataflow-rest-client/pom.xml index 6f6a8d291b..d3667df3f8 100644 --- a/spring-cloud-dataflow-rest-client/pom.xml +++ b/spring-cloud-dataflow-rest-client/pom.xml @@ -4,11 +4,23 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-rest-client + spring-cloud-dataflow-rest-client + Data Flow Rest Client + jar + + true + 3.4.1 + + + org.springframework.security + spring-security-oauth2-client + com.fasterxml.jackson.datatype jackson-datatype-jdk8 @@ -30,18 +42,15 @@ spring-boot-configuration-processor true - - org.springframework.boot - spring-boot-starter-test - test - org.springframework.cloud spring-cloud-skipper + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.cloud @@ -58,5 +67,50 @@ jsr305 provided + + org.springframework.boot + spring-boot-starter-test + test + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + + + source + + jar + + package + + 3.3.0 + + + diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java index b89878e760..5ff1f161b2 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
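One subtlety in the new .properties fixtures above: each repeats the source.time key. That only registers two versions because the importer walks the file line by line (resourceAsLines in DefaultAppRegistryService); loading the same content through java.util.Properties would keep only the last definition. A runnable sketch of the difference (the uri-for-* values are illustrative placeholders):

    import java.io.BufferedReader;
    import java.io.StringReader;
    import java.util.Properties;

    class DuplicateKeyDemo {
        public static void main(String[] args) throws Exception {
            String text = "source.time=uri-for-3.0.0\nsource.time=uri-for-2.0.1\n";
            Properties props = new Properties();
            props.load(new StringReader(text));
            // Properties keeps only the last value of a duplicated key:
            System.out.println(props.getProperty("source.time")); // uri-for-2.0.1
            // Reading line by line preserves both registrations:
            System.out.println(new BufferedReader(new StringReader(text)).lines().count()); // 2
        }
    }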
@@ -21,6 +21,7 @@ import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.DetailedAppRegistrationResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.PagedModel; /** @@ -32,6 +33,7 @@ * @author Patrick Peralta * @author Mark Fisher * @author Chris Schaefer + * @author Chris Bono */ public interface AppRegistryOperations { @@ -81,9 +83,24 @@ public interface AppRegistryOperations { * @param metadataUri URI for the application metadata artifact * @param force if {@code true}, overwrites a pre-existing registration * @return the new app registration + * @deprecated in favor of {@link #register(String, ApplicationType, String, String, AppBootSchemaVersion, boolean)} */ + @Deprecated AppRegistrationResource register(String name, ApplicationType type, String uri, String metadataUri, boolean force); + /** + * Register an application name, type, and boot version with its Maven coordinates. + * + * @param name application name + * @param type application type + * @param uri URI for the application artifact + * @param metadataUri URI for the application metadata artifact + * @param bootVersion application boot version + * @param force if {@code true}, overwrites a pre-existing registration + * @return the new app registration + */ + AppRegistrationResource register(String name, ApplicationType type, String uri, String metadataUri, AppBootSchemaVersion bootVersion, boolean force); + /** * Register an application name, type and version with its Maven coordinates. * @@ -94,10 +111,32 @@ public interface AppRegistryOperations { * @param metadataUri URI for the application metadata artifact * @param force if {@code true}, overwrites a pre-existing registration * @return the new app registration + * @deprecated in favor of {@link #register(String, ApplicationType, String, String, String, AppBootSchemaVersion, boolean)} */ + @Deprecated AppRegistrationResource register(String name, ApplicationType type, String version, String uri, String metadataUri, boolean force); + /** + * Register an application name, type, boot version, and version with its Maven coordinates. + * + * @param name application name + * @param type application type + * @param version application version + * @param uri URI for the application artifact + * @param metadataUri URI for the application metadata artifact + * @param bootVersion application boot version + * @param force if {@code true}, overwrites a pre-existing registration + * @return the new app registration + */ + AppRegistrationResource register(String name, + ApplicationType type, + String version, + String uri, + String metadataUri, + AppBootSchemaVersion bootVersion, + boolean force); + /** * Unregister an application name and type. * diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java index 96ce270cd5..de8c788491 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. 
+ * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.DetailedAppRegistrationResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.Link; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; @@ -40,6 +41,8 @@ * @author Patrick Peralta * @author Christian Tzolov * @author Chris Schaefer + * @author Chris Bono + * @author Corneil du Plessis */ public class AppRegistryTemplate implements AppRegistryOperations { /** @@ -60,7 +63,7 @@ public class AppRegistryTemplate implements AppRegistryOperations { /** * Construct a {@code AppRegistryTemplate} object. * - * @param restTemplate template for HTTP/rest commands + * @param restTemplate template for HTTP/rest commands * @param resourceSupport HATEOAS link support */ public AppRegistryTemplate(RestTemplate restTemplate, RepresentationModel<?> resourceSupport) { @@ -112,31 +115,61 @@ public DetailedAppRegistrationResource info(String name, ApplicationType type, S } @Override - public AppRegistrationResource register(String name, ApplicationType type, String uri, String metadataUri, - boolean force) { - MultiValueMap values = new LinkedMultiValueMap(); - values.add("uri", uri); - if (metadataUri != null) { - values.add("metadata-uri", metadataUri); - } - values.add("force", Boolean.toString(force)); + public AppRegistrationResource register(String name, ApplicationType type, String uri, String metadataUri, boolean force) { + return register(name, type, uri, metadataUri, (AppBootSchemaVersion) null, force); + } + @Override + public AppRegistrationResource register( + String name, + ApplicationType type, + String uri, + String metadataUri, + AppBootSchemaVersion bootVersion, + boolean force + ) { + MultiValueMap<String, Object> values = valuesForRegisterPost(bootVersion, uri, metadataUri, force); return restTemplate.postForObject(appsLink.getHref() + "/{type}/{name}", values, AppRegistrationResource.class, type, name); } @Override public AppRegistrationResource register(String name, ApplicationType type, String version, String uri, - String metadataUri, boolean force) { + String metadataUri, boolean force) { + return this.register(name, type, version, uri, metadataUri, null, force); + } + + @Override + public AppRegistrationResource register( + String name, + ApplicationType type, + String version, + String uri, + String metadataUri, + AppBootSchemaVersion bootVersion, + boolean force + ) { + MultiValueMap<String, Object> values = valuesForRegisterPost(bootVersion, uri, metadataUri, force); + return restTemplate.postForObject(appsLink.getHref() + "/{type}/{name}/{version}", values, + AppRegistrationResource.class, type, name, version); + } + + private MultiValueMap<String, Object> valuesForRegisterPost( + AppBootSchemaVersion bootVersion, + String uri, + String metadataUri, + boolean force + ) { MultiValueMap<String, Object> values = new LinkedMultiValueMap<>(); values.add("uri", uri); if (metadataUri != null) { values.add("metadata-uri", metadataUri); } + if (bootVersion != null) { + values.add("bootVersion", bootVersion.getBootVersion()); + } values.add("force", Boolean.toString(force)); - - return restTemplate.postForObject(appsLink.getHref() + "/{type}/{name}/{version}", values, - AppRegistrationResource.class, type, name, version); + return values; }
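Client-side, the new overloads are additive; a usage sketch (the app coordinates are illustrative, not part of this change):

    AppRegistryOperations apps = dataFlowOperations.appRegistryOperations();
    AppRegistrationResource registered = apps.register(
            "time", ApplicationType.source, "3.0.0",
            "maven://org.springframework.cloud.stream.app:time-source-rabbit:3.0.0",
            "maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:3.0.0",
            AppBootSchemaVersion.BOOT3, false);
    // A null bootVersion keeps the old behaviour: no bootVersion form field is posted.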
"/{type}/{name}/{version}", values, - AppRegistrationResource.class, type, name, version); + return values; } @Override diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java index 394f12b27c..6f330cb030 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java @@ -21,6 +21,8 @@ import java.util.Map; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.springframework.cloud.dataflow.rest.Version; import org.springframework.cloud.dataflow.rest.resource.RootResource; @@ -65,7 +67,7 @@ public class DataFlowTemplate implements DataFlowOperations { /** * Holds discovered URLs of the API. */ - protected final Map resources = new HashMap(); + protected final Map resources = new HashMap<>(); /** * REST client for stream operations. @@ -126,8 +128,8 @@ public class DataFlowTemplate implements DataFlowOperations { * * @param baseURI Must not be null */ - public DataFlowTemplate(URI baseURI) { - this(baseURI, getDefaultDataflowRestTemplate()); + public DataFlowTemplate(URI baseURI, ObjectMapper mapper) { + this(baseURI, getDefaultDataflowRestTemplate(), mapper); } /** @@ -135,10 +137,10 @@ public DataFlowTemplate(URI baseURI) { * missing Mixins for Jackson will be added implicitly. For more information, please * see {@link #prepareRestTemplate(RestTemplate)}. 
* - * @param baseURI Must not be null + * @param baseURI Must not be null * @param restTemplate Must not be null */ - public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { + public DataFlowTemplate(URI baseURI, RestTemplate restTemplate, ObjectMapper mapper) { Assert.notNull(baseURI, "The provided baseURI must not be null."); Assert.notNull(restTemplate, "The provided restTemplate must not be null."); @@ -170,31 +172,37 @@ public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { if (resourceSupport.hasLink(StreamTemplate.DEFINITIONS_REL)) { this.streamOperations = new StreamTemplate(restTemplate, resourceSupport, getVersion()); this.runtimeOperations = new RuntimeTemplate(restTemplate, resourceSupport); - } - else { + } else { this.streamOperations = null; this.runtimeOperations = null; } if (resourceSupport.hasLink(TaskTemplate.DEFINITIONS_RELATION)) { + if (mapper == null) { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); + } this.taskOperations = new TaskTemplate(restTemplate, resourceSupport, getVersion()); this.jobOperations = new JobTemplate(restTemplate, resourceSupport); - if(resourceSupport.hasLink(SchedulerTemplate.SCHEDULES_RELATION)) { + if (resourceSupport.hasLink(SchedulerTemplate.SCHEDULES_RELATION)) { this.schedulerOperations = new SchedulerTemplate(restTemplate, resourceSupport); - } - else { + } else { schedulerOperations = null; } - } - else { + } else { this.taskOperations = null; this.jobOperations = null; this.schedulerOperations = null; } this.appRegistryOperations = new AppRegistryTemplate(restTemplate, resourceSupport); - this.completionOperations = new CompletionTemplate(restTemplate, - resourceSupport.getLink("completions/stream").get(), resourceSupport.getLink("completions/task").get()); - } - else { + this.completionOperations = new CompletionTemplate( + restTemplate, + resourceSupport.getLink("completions/stream").get(), + resourceSupport.getLink("completions/task").get() + ); + } else { this.aboutOperations = null; this.streamOperations = null; this.runtimeOperations = null; @@ -209,7 +217,7 @@ public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { private String getVersion() { String version = ""; AboutResource aboutResource = this.aboutOperations.get(); - if(aboutResource != null) { + if (aboutResource != null) { version = aboutResource.getVersionInfo().getCore().getVersion(); } return version; @@ -228,7 +236,7 @@ private String getVersion() { *
  • {@link ExecutionContextJacksonMixIn} *
  • {@link StepExecutionHistoryJacksonMixIn} * - * + *

    * Furthermore, this method will also register the {@link Jackson2HalModule} * * @param restTemplate Can be null. Instantiates a new {@link RestTemplate} if null @@ -268,7 +276,7 @@ public static RestTemplate prepareRestTemplate(RestTemplate restTemplate) { public static ObjectMapper prepareObjectMapper(ObjectMapper objectMapper) { Assert.notNull(objectMapper, "The objectMapper must not be null."); return objectMapper - .registerModules(new Jackson2HalModule(), new Jackson2DataflowModule()); + .registerModules(new Jackson2HalModule(), new Jackson2DataflowModule()); } /** @@ -281,12 +289,9 @@ public static RestTemplate getDefaultDataflowRestTemplate() { } public Link getLink(RepresentationModel resourceSupport, String rel) { - Link link = resourceSupport.getLink(rel).get(); - if (link == null) { - throw new DataFlowServerException( - "Server did not return a link for '" + rel + "', links: '" + resourceSupport + "'"); - } - return link; + return resourceSupport.getLink(rel).orElseThrow(() -> + new DataFlowServerException("Server did not return a link for '" + rel + "', links: '" + resourceSupport + "'") + ); } @Override diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java index ed575f0cd8..9f531aa6aa 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java @@ -38,9 +38,10 @@ public interface JobOperations { /** * Restarts a job by id * - * @param id job execution id + * @param id job execution id + * @param schemaTarget the schema target for the job execution */ - void executionRestart(long id); + void executionRestart(long id, String schemaTarget); /** * @return the list job executions without step executions known to the system. @@ -69,39 +70,42 @@ public interface JobOperations { */ PagedModel executionListByJobName(String jobName); - /** * Return the {@link JobExecutionResource} for the id specified. * - * @param id identifier of the job execution + * @param id identifier of the job execution + * @param schemaTarget the schema target for the job execution * @return {@link JobExecutionResource} */ - JobExecutionResource jobExecution(long id); + JobExecutionResource jobExecution(long id, String schemaTarget); /** * Return the {@link JobInstanceResource} for the id specified. * - * @param id identifier of the job instasnce + * @param id identifier of the job instance + * @param schemaTarget the schema target for the job instance * @return {@link JobInstanceResource} */ - JobInstanceResource jobInstance(long id); + JobInstanceResource jobInstance(long id, String schemaTarget); /** * List step executions known for a specific job execution id. * * @param jobExecutionId the id of the job execution. + * @param schemaTarget the schema target for the job execution * @return the paged list of step executions */ - PagedModel stepExecutionList(long jobExecutionId); + PagedModel stepExecutionList(long jobExecutionId, String schemaTarget); /** * Return StepExecutionProgressInfoResource for a specific job execution id and step * execution Id. * - * @param jobExecutionId the id of the job execution for the step to be returned. + * @param jobExecutionId the id of the job execution for the step to be returned. 
	 * @param stepExecutionId the id of the step execution to be returned.
+	 * @param schemaTarget    the schema target of the job execution.
	 * @return the step execution progress info
	 */
-	StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId);
+	StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId, String schemaTarget);
}
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java
index a88f635562..5ba33e0c75 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java
@@ -16,6 +16,9 @@
 package org.springframework.cloud.dataflow.rest.client;
 
+import java.util.HashMap;
+import java.util.Map;
+
 import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource;
 import org.springframework.cloud.dataflow.rest.resource.JobExecutionThinResource;
 import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource;
@@ -25,7 +28,12 @@
 import org.springframework.hateoas.PagedModel;
 import org.springframework.hateoas.RepresentationModel;
 import org.springframework.util.Assert;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+import org.springframework.util.StringUtils;
 import org.springframework.web.client.RestTemplate;
+import org.springframework.web.util.DefaultUriBuilderFactory;
+import org.springframework.web.util.UriComponentsBuilder;
 
 /**
 * Implementation for {@link JobOperations}.
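The implementation that follows appends the new schemaTarget argument as an optional query parameter. A minimal sketch of that pattern, with a placeholder URL and target name:

    // Sketch of the optional schemaTarget query parameter (placeholder values).
    String schemaTarget = "boot3";
    UriComponentsBuilder builder = UriComponentsBuilder
            .fromUriString("http://localhost:9393/jobs/executions/execution/42");
    if (StringUtils.hasText(schemaTarget)) {
        // Only appended when present, so URLs sent to older servers stay unchanged.
        builder.queryParam("schemaTarget", schemaTarget);
    }
    String url = builder.toUriString();
    // -> http://localhost:9393/jobs/executions/execution/42?schemaTarget=boot3
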
@@ -34,11 +43,11 @@
 */
public class JobTemplate implements JobOperations {

-	private static final String EXECUTIONS_THIN_RELATION = "jobs/thinexecutions";
-
+	private static final String EXECUTIONS_THIN_RELATION = "jobs/thinexecutions";
+	private static final String EXECUTIONS_RELATION = "jobs/executions";
	private static final String EXECUTION_RELATION = "jobs/executions/execution";
-
-	private static final String EXECUTION_RELATION_BY_NAME = "jobs/thinexecutions/name";
+	private static final String EXECUTION_RELATION_BY_NAME = "jobs/executions/name";
+	private static final String EXECUTION_THIN_RELATION_BY_NAME = "jobs/thinexecutions/name";

	private static final String INSTANCE_RELATION = "jobs/instances/instance";

@@ -51,6 +60,9 @@ public class JobTemplate implements JobOperations {
	private final RestTemplate restTemplate;

	private final Link executionsLink;
+	private final Link thinExecutionsLink;
+
+	private final Link thinExecutionByNameLink;

	private final Link executionLink;

@@ -67,21 +79,25 @@ public class JobTemplate implements JobOperations {
	JobTemplate(RestTemplate restTemplate, RepresentationModel<?> resources) {
		Assert.notNull(resources, "URI CollectionModel must not be null");
		Assert.notNull(restTemplate, "RestTemplate must not be null");
-		Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required");
-		Assert.notNull(resources.getLink(EXECUTION_RELATION), "Execution relation is required");
-		Assert.notNull(resources.getLink(EXECUTION_RELATION_BY_NAME), "Execution by name relation is required");
-		Assert.notNull(resources.getLink(INSTANCE_RELATION), "Instance relation is required");
-		Assert.notNull(resources.getLink(INSTANCE_RELATION_BY_NAME), "Instance by name relation is required");
-		Assert.notNull(resources.getLink(STEP_EXECUTION_RELATION_BY_ID), "Step Execution by id relation is required");
-		Assert.notNull(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID),
+		Assert.isTrue(resources.getLink(EXECUTIONS_RELATION).isPresent(), "Executions relation is required");
+		Assert.isTrue(resources.getLink(EXECUTIONS_THIN_RELATION).isPresent(), "Executions thin relation is required");
+		Assert.isTrue(resources.getLink(EXECUTION_THIN_RELATION_BY_NAME).isPresent(), "Execution thin by name relation is required");
+		Assert.isTrue(resources.getLink(EXECUTION_RELATION).isPresent(), "Execution relation is required");
+		Assert.isTrue(resources.getLink(EXECUTION_RELATION_BY_NAME).isPresent(), "Execution by name relation is required");
+		Assert.isTrue(resources.getLink(INSTANCE_RELATION).isPresent(), "Instance relation is required");
+		Assert.isTrue(resources.getLink(INSTANCE_RELATION_BY_NAME).isPresent(), "Instance by name relation is required");
+		Assert.isTrue(resources.getLink(STEP_EXECUTION_RELATION_BY_ID).isPresent(), "Step Execution by id relation is required");
+		Assert.isTrue(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID).isPresent(),
				"Step Execution Progress by id " + "relation is required");
-		Assert.notNull(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID),
+		Assert.isTrue(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID).isPresent(),
				"Step Execution View by id relation" + " is required");
		this.restTemplate = restTemplate;
		this.executionsLink = resources.getLink(EXECUTIONS_RELATION).get();
+		this.thinExecutionsLink = resources.getLink(EXECUTIONS_THIN_RELATION).get();
		this.executionLink = resources.getLink(EXECUTION_RELATION).get();
		this.executionByNameLink = resources.getLink(EXECUTION_RELATION_BY_NAME).get();
+
this.thinExecutionByNameLink = resources.getLink(EXECUTION_THIN_RELATION_BY_NAME).get(); this.instanceLink = resources.getLink(INSTANCE_RELATION).get(); this.instanceByNameLink = resources.getLink(INSTANCE_RELATION_BY_NAME).get(); this.stepExecutionsLink = resources.getLink(STEP_EXECUTION_RELATION_BY_ID).get(); @@ -90,26 +106,24 @@ public class JobTemplate implements JobOperations { @Override public PagedModel executionList() { - String uriTemplate = executionsLink.getHref(); - uriTemplate = uriTemplate + "?size=2000"; - - return restTemplate.getForObject(uriTemplate, JobExecutionResource.Page.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionsLink.getHref()).queryParam("size", "2000"); + return restTemplate.getForObject(builder.toUriString(), JobExecutionResource.Page.class); } @Override - public void executionRestart(long id) { - String uriTemplate = executionLink.expand(id).getHref(); - uriTemplate = uriTemplate + "?restart=true"; + public void executionRestart(long id, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionLink.expand(id).getHref()).queryParam("restart", "true"); - restTemplate.put(uriTemplate, null); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + restTemplate.put(builder.toUriString(), null); } @Override public PagedModel executionThinList() { - String uriTemplate = executionsLink.getHref(); - uriTemplate = uriTemplate + "?size=2000"; - - return restTemplate.getForObject(uriTemplate, JobExecutionThinResource.Page.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(thinExecutionsLink.getHref()).queryParam("size", "2000"); + return restTemplate.getForObject(builder.toUriString(), JobExecutionThinResource.Page.class); } @Override @@ -119,36 +133,50 @@ public PagedModel instanceList(String jobName) { @Override public PagedModel executionThinListByJobName(String jobName) { - return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), - JobExecutionThinResource.Page.class); + return restTemplate.getForObject(thinExecutionByNameLink.expand(jobName).getHref(), JobExecutionThinResource.Page.class); } @Override public PagedModel executionListByJobName(String jobName) { - return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), - JobExecutionResource.Page.class); + return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), JobExecutionResource.Page.class); } @Override - public JobExecutionResource jobExecution(long id) { - return restTemplate.getForObject(executionLink.expand(id).getHref(), JobExecutionResource.class); + public JobExecutionResource jobExecution(long id, String schemaTarget) { + String url = executionLink.expand(id).getHref(); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(url); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + + return restTemplate.getForObject(builder.toUriString(), JobExecutionResource.class); } @Override - public JobInstanceResource jobInstance(long id) { - return restTemplate.getForObject(instanceLink.expand(id).getHref(), JobInstanceResource.class); + public JobInstanceResource jobInstance(long id, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(instanceLink.expand(id).getHref()); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + return 
restTemplate.getForObject(builder.toUriString(), JobInstanceResource.class); } @Override - public PagedModel stepExecutionList(long jobExecutionId) { - return restTemplate.getForObject(stepExecutionsLink.expand(jobExecutionId).getHref(), - StepExecutionResource.Page.class); + public PagedModel stepExecutionList(long jobExecutionId, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(stepExecutionsLink.expand(jobExecutionId).getHref()); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + return restTemplate.getForObject(builder.toUriString(), StepExecutionResource.Page.class); } @Override - public StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId) { - return restTemplate.getForObject(stepExecutionProgressLink.expand(jobExecutionId, stepExecutionId).getHref(), - StepExecutionProgressInfoResource.class); + public StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(stepExecutionProgressLink.expand(jobExecutionId, stepExecutionId).getHref()); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + return restTemplate.getForObject(builder.toUriString(), StepExecutionProgressInfoResource.class); } } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java index c7fea197af..0771a174b9 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,15 +16,20 @@ package org.springframework.cloud.dataflow.rest.client; +import java.util.Map; + import org.springframework.cloud.dataflow.rest.resource.AppStatusResource; import org.springframework.cloud.dataflow.rest.resource.StreamStatusResource; import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpHeaders; /** * Defines operations available for obtaining information about deployed apps. * * @author Eric Bottard * @author Mark Fisher + * @author Chris Bono + * @author Corneil du Plessis */ public interface RuntimeOperations { @@ -44,4 +49,36 @@ public interface RuntimeOperations { * @return the runtime information about the deployed streams their apps and instances. */ PagedModel streamStatus(String... streamNames); + + /** + * Access an HTTP GET exposed actuator resource for a deployed app instance. + * + * @param appId the application id + * @param instanceId the application instance id + * @param endpoint the relative actuator path, e.g., {@code /info} + * @return the contents as JSON text + */ + String getFromActuator(String appId, String instanceId, String endpoint); + + /** + * Access an HTTP POST exposed actuator resource for a deployed app instance. 
+	 *
+	 * @param appId the application id
+	 * @param instanceId the application instance id
+	 * @param endpoint the relative actuator path, e.g., {@code /info}
+	 * @param data map representing the data to post on request body
+	 * @return response from actuator
+	 */
+	Object postToActuator(String appId, String instanceId, String endpoint, Map<String, Object> data);
+
+	/**
+	 * Provides for POST to an application HTTP endpoint exposed via the instance's url attribute.
+	 *
+	 * @param appId the application id
+	 * @param instanceId the application instance id
+	 * @param data data to send to url. The mimetype should be in the Content-Type header if important.
+	 * @param headers post request headers.
+	 * This method will throw an exception if the POST request fails.
+	 */
+	void postToUrl(String appId, String instanceId, byte[] data, HttpHeaders headers);
}
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java
index 1187398021..8a7ec49170 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2015-2019 the original author or authors.
+ * Copyright 2015-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,11 +16,27 @@
 package org.springframework.cloud.dataflow.rest.client;
 
+import java.time.Duration;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.springframework.cloud.dataflow.rest.resource.AppStatusResource;
 import org.springframework.cloud.dataflow.rest.resource.StreamStatusResource;
+import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer;
+import org.springframework.cloud.skipper.domain.ActuatorPostRequest;
 import org.springframework.hateoas.Link;
 import org.springframework.hateoas.PagedModel;
 import org.springframework.hateoas.RepresentationModel;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
 import org.springframework.web.client.RestTemplate;
 
 /**
@@ -29,8 +45,11 @@
 * @author Eric Bottard
 * @author Mark Fisher
 * @author Christian Tzolov
+ * @author Chris Bono
+ * @author Corneil du Plessis
 */
 public class RuntimeTemplate implements RuntimeOperations {
+	private static final Logger logger = LoggerFactory.getLogger(RuntimeTemplate.class);
 
	private final RestTemplate restTemplate;
 
@@ -44,6 +63,16 @@ public class RuntimeTemplate implements RuntimeOperations {
	 */
	private final Link appStatusUriTemplate;
 
+	/**
+	 * Uri template for accessing actuator endpoint on a single app.
+	 */
+	private final Link appActuatorUriTemplate;
+
+	/**
+	 * Uri template for posting to app instance with url attribute.
+	 */
+	private final Link appUrlPostUriTemplate;
+
	/**
	 * Uri template for accessing runtime status of selected streams, their apps and instances.
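As a usage sketch for the two actuator operations introduced above; the deployment id, instance id, and endpoint paths are placeholders:

    // Hypothetical calls against a deployed stream app instance.
    RuntimeOperations runtime = dataFlowOperations.runtimeOperations();

    // GET an actuator endpoint of one app instance, returned as raw JSON text.
    String env = runtime.getFromActuator("ticktock.log-v1", "ticktock.log-v1-0", "/env");

    // POST to an actuator endpoint, e.g. raising a logger level.
    Map<String, Object> body = Collections.singletonMap("configuredLevel", "DEBUG");
    Object result = runtime.postToActuator("ticktock.log-v1", "ticktock.log-v1-0", "/loggers/ROOT", body);
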
*/ @@ -51,27 +80,104 @@ public class RuntimeTemplate implements RuntimeOperations { RuntimeTemplate(RestTemplate restTemplate, RepresentationModel resources) { this.restTemplate = restTemplate; - this.appStatusesUriTemplate = resources.getLink("runtime/apps").get(); - this.appStatusUriTemplate = resources.getLink("runtime/apps/{appId}").get(); - this.streamStatusUriTemplate = resources.getLink("runtime/streams/{streamNames}").get(); + this.appStatusesUriTemplate = getLink("runtime/apps", resources, true); + this.appStatusUriTemplate = getLink("runtime/apps/{appId}", resources, true); + this.streamStatusUriTemplate = getLink("runtime/streams/{streamNames}", resources, true); + this.appActuatorUriTemplate = getLink("runtime/apps/{appId}/instances/{instanceId}/actuator", resources, false); + this.appUrlPostUriTemplate = getLink("runtime/apps/{appId}/instances/{instanceId}/post", resources, false); + } + + private Link getLink(String relationPath, RepresentationModel resources, boolean required) { + Optional link = resources.getLink(relationPath); + if (required && !link.isPresent()) { + throw new RuntimeException("Unable to retrieve URI template for " + relationPath); + } + return link.orElse(null); } @Override public PagedModel status() { String uriTemplate = this.appStatusesUriTemplate.expand().getHref(); - uriTemplate = uriTemplate + "?size=2000"; + uriTemplate = uriTemplate + "?size=2000"; // TODO is this valid? return this.restTemplate.getForObject(uriTemplate, AppStatusResource.Page.class); } @Override public AppStatusResource status(String deploymentId) { - return this.restTemplate.getForObject(appStatusUriTemplate.expand(deploymentId).getHref(), AppStatusResource.class); + return this.restTemplate.getForObject( + appStatusUriTemplate.expand(deploymentId).getHref(), + AppStatusResource.class + ); } @Override public PagedModel streamStatus(String... 
streamNames) {
-		return this.restTemplate.getForObject(streamStatusUriTemplate.expand(streamNames).getHref(),
-				StreamStatusResource.Page.class);
+		return this.restTemplate.getForObject(
+				streamStatusUriTemplate.expand(streamNames).getHref(),
+				StreamStatusResource.Page.class
+		);
	}

+	@Override
+	public String getFromActuator(String appId, String instanceId, String endpoint) {
+		Assert.notNull(appActuatorUriTemplate, "actuator endpoint not found");
+		String uri = appActuatorUriTemplate.expand(appId, instanceId, endpoint).getHref();
+		return this.restTemplate.getForObject(uri, String.class);
+	}
+
+	@Override
+	public Object postToActuator(String appId, String instanceId, String endpoint, Map<String, Object> body) {
+		Assert.notNull(appActuatorUriTemplate, "actuator endpoint not found");
+		String uri = appActuatorUriTemplate.expand(appId, instanceId).getHref();
+		ActuatorPostRequest actuatorPostRequest = new ActuatorPostRequest();
+		actuatorPostRequest.setEndpoint(endpoint);
+		actuatorPostRequest.setBody(body);
+		return this.restTemplate.postForObject(uri, actuatorPostRequest, Object.class);
+	}
+
+	@Override
+	public void postToUrl(String appId, String instanceId, byte[] data, HttpHeaders headers) {
+		Assert.notNull(appUrlPostUriTemplate, "post endpoint not found");
+		String uri = appUrlPostUriTemplate.expand(appId, instanceId).getHref();
+		if (logger.isDebugEnabled()) {
+			ArgumentSanitizer sanitizer = new ArgumentSanitizer();
+			logger.debug("postToUrl:{}:{}:{}:{}", appId, instanceId, uri, sanitizer.sanitizeHeaders(headers));
+		}
+		// Wait once for the target URL to become reachable before posting.
+		waitForUrl(uri, Duration.ofSeconds(30));
+		HttpEntity<byte[]> entity = new HttpEntity<>(data, headers);
+		ResponseEntity<String> response = this.restTemplate.exchange(uri, HttpMethod.POST, entity, String.class);
+		if (!response.getStatusCode().is2xxSuccessful()) {
+			throw new RuntimeException("POST:exception:" + response.getStatusCode() + ":" + response.getBody());
+		}
+	}
+
+	private void waitForUrl(String uri, Duration timeout) {
+		// Poll with OPTIONS until the URL answers or the timeout elapses.
+		final long waitUntilMillis = System.currentTimeMillis() + timeout.toMillis();
+		do {
+			try {
+				Set<HttpMethod> allowed = this.restTemplate.optionsForAllow(uri);
+				if (!CollectionUtils.isEmpty(allowed)) {
+					break;
+				}
+			} catch (Throwable x) {
+				final String message = x.getMessage();
+				if (message != null && message.contains("UnknownHostException")) {
+					// DNS may lag behind app deployment; keep retrying until the deadline.
+					logger.trace("waitForUrl:retry:exception:" + x);
+				} else if (message != null && message.contains("500")) {
+					break;
+				} else {
+					logger.trace("waitForUrl:exception:" + x);
+				}
+			}
+			try {
+				Thread.sleep(2000L);
+			} catch (InterruptedException e) {
+				Thread.currentThread().interrupt();
+				break;
+			}
+		} while (System.currentTimeMillis() < waitUntilMillis);
+	}
}
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java
index d1de94f656..b47bfba585 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java
@@ -23,10 +23,12 @@
 import javax.naming.OperationNotSupportedException;
 
 import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource;
+import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;
 import org.springframework.cloud.dataflow.rest.resource.LauncherResource;
 import
org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; import org.springframework.hateoas.PagedModel; /** @@ -36,6 +38,7 @@ * @author Michael Minella * @author Gunnar Hillert * @author David Turanski + * @author Corneil du Plessis */ public interface TaskOperations { @@ -53,8 +56,8 @@ public interface TaskOperations { /** * Create a new task definition * - * @param name the name of the task - * @param definition the task definition DSL + * @param name the name of the task + * @param definition the task definition DSL * @param description the description of the task definition * @return the task definition */ @@ -63,27 +66,29 @@ public interface TaskOperations { /** * Launch an already created task. * - * @param name the name of the task + * @param name the name of the task * @param properties the deployment properties - * @param arguments the command line arguments + * @param arguments the command line arguments * @return long containing the TaskExecutionId */ - long launch(String name, Map properties, List arguments); + LaunchResponseResource launch(String name, Map properties, List arguments); /** * Request the stop of a group {@link org.springframework.cloud.task.repository.TaskExecution}s. * - * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + * @param schemaTarget the schema target of the task execution. */ - void stop(String ids); + void stop(String ids, String schemaTarget); /** * Request the stop of a group {@link org.springframework.cloud.task.repository.TaskExecution}s. * - * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. - * @param platform the platform name where the task is executing. + * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + * @param schemaTarget the schema target of the task execution. + * @param platform the platform name where the task is executing. */ - void stop(String ids, String platform); + void stop(String ids, String schemaTarget, String platform); /** * Destroy an existing task. @@ -95,7 +100,7 @@ public interface TaskOperations { /** * Destroy an existing task with the flag to cleanup task resources. * - * @param name the name of the task + * @param name the name of the task * @param cleanup flag indicates task execution cleanup */ void destroy(String name, boolean cleanup); @@ -105,6 +110,11 @@ public interface TaskOperations { */ PagedModel executionList(); + /** + * @return the list of thin task executions known to the system. + */ + PagedModel thinExecutionList(); + /** * List task executions known to the system filtered by task name. * @@ -113,13 +123,22 @@ public interface TaskOperations { */ PagedModel executionListByTaskName(String taskName); + /** + * List task thin executions known to the system filtered by task name. + * + * @param taskName of the executions. + * @return the list of thin task executions known to the system. + */ + PagedModel thinExecutionListByTaskName(String taskName); + /** * Return the {@link TaskExecutionResource} for the id specified. 
	 *
-	 * @param id identifier of the task execution
+	 * @param id           identifier of the task execution
+	 * @param schemaTarget the schema target of the task execution.
	 * @return {@link TaskExecutionResource}
	 */
-	TaskExecutionResource taskExecutionStatus(long id);
+	TaskExecutionResource taskExecutionStatus(long id, String schemaTarget);

	/**
	 * Return the task execution log. The platform from which to retrieve the log will be set to {@code default}.
@@ -133,7 +152,7 @@
	 * Return the task execution log.
	 *
	 * @param externalExecutionId the external execution identifier of the task execution.
-	 * @param platform the platform from which to obtain the log.
+	 * @param platform            the platform from which to obtain the log.
	 * @return {@link String} containing the log.
	 */
	String taskExecutionLog(String externalExecutionId, String platform);
@@ -141,6 +160,7 @@
	/**
	 * Return information including the count of currently executing tasks and task execution
	 * limits.
+	 *
	 * @return Collection of {@link CurrentTaskExecutionsResource}
	 */
	Collection<CurrentTaskExecutionsResource> currentTaskExecutions();
@@ -148,36 +168,41 @@
	/**
	 * Cleanup any resources associated with the execution for the id specified.
	 *
-	 * @param id identifier of the task execution
+	 * @param id           identifier of the task execution
+	 * @param schemaTarget the schema target of the task execution.
	 */
-	void cleanup(long id);
+	void cleanup(long id, String schemaTarget);

	/**
	 * Cleanup any resources associated with the execution for the id specified.
	 *
-	 * @param id identifier of the task execution
-	 * @param removeData delete the history of the execution
+	 * @param id           identifier of the task execution
+	 * @param schemaTarget the schema target of the task execution.
+	 * @param removeData   delete the history of the execution
	 */
-	void cleanup(long id, boolean removeData);
+	void cleanup(long id, String schemaTarget, boolean removeData);

	/**
	 * Cleanup any resources associated with the matching task executions.
	 *
	 * @param completed cleanup only completed task executions
-	 * @param taskName the name of the task to cleanup, if null then all the tasks are considered.
+	 * @param taskName  the name of the task to cleanup, if null then all the tasks are considered.
	 */
	void cleanupAllTaskExecutions(boolean completed, String taskName);

	/**
	 * Get the task executions count with the option to filter only the completed task executions.
+	 *
	 * @param completed count only completed task executions
-	 * @param taskName the name of the task to count, if null then all the tasks are considered.
+	 * @param taskName  the name of the task to count, if null then all the tasks are considered.
+	 * @return the number of task executions.
	 */
	Integer getAllTaskExecutionsCount(boolean completed, String taskName);

	/**
	 * Return the validation status for the tasks in a definition.
+	 *
	 * @param taskDefinitionName The name of the task definition to be validated.
	 * @return {@link TaskAppStatusResource} containing the task app statuses.
	 * @throws OperationNotSupportedException if the server does not support task validation
@@ -186,7 +211,6 @@
	/**
	 * Destroy all existing tasks.
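For illustration, how the revised launch contract might be used end to end; the accessors on LaunchResponseResource are assumed from the setters shown elsewhere in this patch, and the task name is a placeholder:

    // Hypothetical end-to-end flow.
    TaskOperations tasks = dataFlowOperations.taskOperations();
    LaunchResponseResource response = tasks.launch("my-task", Collections.emptyMap(), Collections.emptyList());

    // Follow-up calls now need the execution id and its schema target together.
    TaskExecutionResource execution = tasks.taskExecutionStatus(response.getExecutionId(), response.getSchemaTarget());
    tasks.stop(String.valueOf(response.getExecutionId()), response.getSchemaTarget());
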
- * */ void destroyAll(); } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java index 77c92127e3..d884e9cd68 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,17 +20,23 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.stream.Stream; import javax.naming.OperationNotSupportedException; import org.springframework.cloud.dataflow.rest.client.support.VersionUtils; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.LauncherResource; import org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionsInfoResource; +import org.springframework.cloud.dataflow.rest.resource.about.AboutResource; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.core.ParameterizedTypeReference; import org.springframework.hateoas.Link; import org.springframework.hateoas.RepresentationModel; @@ -49,6 +55,7 @@ * @author Michael Minella * @author Gunnar Hillert * @author David Turanski + * @author Corneil du Plessis */ public class TaskTemplate implements TaskOperations { @@ -60,12 +67,23 @@ public class TaskTemplate implements TaskOperations { private static final String VALIDATION_RELATION_VERSION = "1.7.0"; + private static final String VALIDATION_THIN_TASK_VERSION = "2.11.3"; + + private static final String VALIDATION_TASK_LAUNCH_VERSION = "2.11.0"; + private static final String VALIDATION_TASK_THIN_BY_NAME = "2.11.6"; + private static final String EXECUTIONS_RELATION = "tasks/executions"; + private static final String THIN_EXECUTIONS_RELATION = "tasks/thinexecutions"; + + private static final String THIN_EXECUTIONS_BY_NAME_RELATION = "tasks/thinexecutions/name"; + private static final String EXECUTIONS_CURRENT_RELATION = "tasks/executions/current"; private static final String EXECUTION_RELATION = "tasks/executions/execution"; + private static final String EXECUTION_LAUNCH_RELATION = "tasks/executions/launch"; + private static final String EXECUTION_RELATION_BY_NAME = "tasks/executions/name"; private static final String EXECUTIONS_INFO_RELATION = "tasks/info/executions"; @@ -84,8 +102,14 @@ public class TaskTemplate implements TaskOperations { private final Link executionsLink; + private final Link thinExecutionsLink; + + private final Link thinExecutionsByNameLink; + private final Link 
executionLink; + private final Link executionLaunchLink; + private final Link executionByNameLink; private final Link executionsCurrentLink; @@ -97,46 +121,73 @@ public class TaskTemplate implements TaskOperations { private final Link platformListLink; private final String dataFlowServerVersion; + private String actualDataFlowServerCoreVersion = null; private final Link retrieveLogLink; - + private final Link aboutLink; TaskTemplate(RestTemplate restTemplate, RepresentationModel resources, String dataFlowServerVersion) { Assert.notNull(resources, "URI CollectionModel must not be be null"); - Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required"); - Assert.notNull(resources.getLink(DEFINITIONS_RELATION), "Definitions relation is required"); - Assert.notNull(resources.getLink(DEFINITION_RELATION), "Definition relation is required"); Assert.notNull(restTemplate, "RestTemplate must not be null"); - Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION), "Execution relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION_BY_NAME), "Execution by name relation is required"); Assert.notNull(dataFlowServerVersion, "dataFlowVersion must not be null"); - Assert.notNull(resources.getLink(RETRIEVE_LOG), "Log relation is required"); + Stream.of( + "about", + DEFINITIONS_RELATION, + DEFINITION_RELATION, + EXECUTIONS_RELATION, + EXECUTION_RELATION, + EXECUTION_RELATION_BY_NAME, + EXECUTIONS_INFO_RELATION, + PLATFORM_LIST_RELATION, + RETRIEVE_LOG + ).forEach(relation -> { + Assert.isTrue(resources.getLink(relation).isPresent(), () -> relation + " relation is required"); + }); + this.restTemplate = restTemplate; this.dataFlowServerVersion = dataFlowServerVersion; - if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion( - VersionUtils.getThreePartVersion(dataFlowServerVersion), - VALIDATION_RELATION_VERSION)) { - Assert.notNull(resources.getLink(VALIDATION_REL), "Validiation relation for tasks is required"); + String version = VersionUtils.getThreePartVersion(dataFlowServerVersion); + if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(version, VALIDATION_RELATION_VERSION)) { + Assert.isTrue(resources.getLink(VALIDATION_REL).isPresent(), ()->VALIDATION_REL + " relation is required"); + this.validationLink = resources.getLink(VALIDATION_REL).get(); + } else { + this.validationLink = null; } - if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion( - VersionUtils.getThreePartVersion(dataFlowServerVersion), - EXECUTIONS_CURRENT_RELATION_VERSION)) { - Assert.notNull(resources.getLink(EXECUTIONS_CURRENT_RELATION), "Executions current relation is required"); + if(VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(version, VALIDATION_THIN_TASK_VERSION)) { + Assert.isTrue(resources.getLink(THIN_EXECUTIONS_RELATION).isPresent(), () -> THIN_EXECUTIONS_RELATION + " relation is required"); + this.thinExecutionsLink = resources.getLink(THIN_EXECUTIONS_RELATION).get(); + } else { + this.thinExecutionsLink = null; + } + if(VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(version, VALIDATION_TASK_THIN_BY_NAME)) { + Assert.isTrue(resources.getLink(THIN_EXECUTIONS_BY_NAME_RELATION).isPresent(), () -> THIN_EXECUTIONS_BY_NAME_RELATION + " relation is required"); + this.thinExecutionsByNameLink = resources.getLink(THIN_EXECUTIONS_BY_NAME_RELATION).get(); + } else { + 
this.thinExecutionsByNameLink = null; } - this.restTemplate = restTemplate; + if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(version, VALIDATION_TASK_LAUNCH_VERSION)) { + Assert.isTrue(resources.getLink(EXECUTION_LAUNCH_RELATION).isPresent(), () -> EXECUTION_LAUNCH_RELATION + " relation is required"); + this.executionLaunchLink = resources.getLink(EXECUTION_LAUNCH_RELATION).get(); + } else { + this.executionLaunchLink = null; + } + + if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(version, EXECUTIONS_CURRENT_RELATION_VERSION)) { + Assert.isTrue(resources.getLink(EXECUTIONS_CURRENT_RELATION).isPresent(), ()-> EXECUTIONS_CURRENT_RELATION + " relation is required"); + this.executionsCurrentLink = resources.getLink(EXECUTIONS_CURRENT_RELATION).get(); + } else { + this.executionsCurrentLink = null; + } + this.aboutLink = resources.getLink("about").get(); + this.definitionsLink = resources.getLink(DEFINITIONS_RELATION).get(); this.definitionLink = resources.getLink(DEFINITION_RELATION).get(); this.executionsLink = resources.getLink(EXECUTIONS_RELATION).get(); this.executionLink = resources.getLink(EXECUTION_RELATION).get(); this.executionByNameLink = resources.getLink(EXECUTION_RELATION_BY_NAME).get(); - this.executionsCurrentLink = resources.getLink(EXECUTIONS_CURRENT_RELATION).get(); - if (resources.getLink(EXECUTIONS_INFO_RELATION).isPresent()) { - this.executionsInfoLink = resources.getLink(EXECUTIONS_INFO_RELATION).get(); - } - this.validationLink = resources.getLink(VALIDATION_REL).get(); + this.executionsInfoLink = resources.getLink(EXECUTIONS_INFO_RELATION).get(); this.platformListLink = resources.getLink(PLATFORM_LIST_RELATION).get(); this.retrieveLogLink = resources.getLink(RETRIEVE_LOG).get(); } @@ -157,33 +208,66 @@ public LauncherResource.Page listPlatforms() { @Override public TaskDefinitionResource create(String name, String definition, String description) { - MultiValueMap values = new LinkedMultiValueMap(); + MultiValueMap values = new LinkedMultiValueMap<>(); values.add("name", name); values.add("definition", definition); values.add("description", description); - return restTemplate.postForObject(definitionsLink.expand().getHref(), values, - TaskDefinitionResource.class); + return restTemplate.postForObject(definitionsLink.expand().getHref(), values, + TaskDefinitionResource.class); + } + private boolean isNewServer() { + if(this.actualDataFlowServerCoreVersion == null) { + AboutResource aboutResource = restTemplate.getForObject(aboutLink.expand().getHref(), AboutResource.class); + Assert.notNull(aboutResource, "Expected about"); + this.actualDataFlowServerCoreVersion = aboutResource.getVersionInfo().getCore().getVersion(); + } + String v2_11_0 = VersionUtils.getThreePartVersion("2.11.0-SNAPSHOT"); + String serverVersion = VersionUtils.getThreePartVersion(this.actualDataFlowServerCoreVersion); + return VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(serverVersion, v2_11_0); } - @Override - public long launch(String name, Map properties, List arguments) { + public LaunchResponseResource launch(String name, Map properties, List arguments) { MultiValueMap values = new LinkedMultiValueMap<>(); - values.add("properties", DeploymentPropertiesUtils.format(properties)); - values.add("arguments", StringUtils.collectionToDelimitedString(arguments, " ")); - return restTemplate.postForObject(executionByNameLink.expand(name).getHref(), values, Long.class, name); + String formattedProperties = 
DeploymentPropertiesUtils.format(properties); + String commandLineArguments = StringUtils.collectionToDelimitedString(arguments, " "); + values.add("properties", formattedProperties); + values.add("arguments", commandLineArguments); + if(isNewServer()) { + Assert.notNull(executionLaunchLink, "This version of SCDF doesn't support tasks/executions/launch"); + values.add("name", name); + String url = executionLaunchLink.expand(name).getHref(); + values.remove("name"); + return restTemplate.postForObject(url, values, LaunchResponseResource.class); + } else { + Long id = restTemplate.postForObject(executionByNameLink.expand(name).getHref(), values, Long.class, name); + if(id != null) { + LaunchResponseResource response = new LaunchResponseResource(); + response.setExecutionId(id); + response.setSchemaTarget(SchemaVersionTarget.defaultTarget().getName()); + return response; + } else { + throw new RuntimeException("Expected id"); + } + } } @Override - public void stop(String ids) { + public void stop(String ids, String schemaTarget) { MultiValueMap values = new LinkedMultiValueMap<>(); - restTemplate.postForLocation(executionLink.expand(ids).getHref(),values); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + restTemplate.postForLocation(executionLink.expand(ids).getHref(), values); } @Override - public void stop(String ids, String platform) { + public void stop(String ids, String schemaTarget, String platform) { MultiValueMap values = new LinkedMultiValueMap<>(); values.add("platform", platform); - restTemplate.postForLocation(executionLink.expand(ids).getHref(),values); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + restTemplate.postForLocation(executionLink.expand(ids).getHref(), values); } @Override @@ -207,15 +291,38 @@ public TaskExecutionResource.Page executionList() { return restTemplate.getForObject(executionsLink.getHref(), TaskExecutionResource.Page.class); } + @Override + public TaskExecutionThinResource.Page thinExecutionList() { + if(thinExecutionsLink != null) { + return restTemplate.getForObject(thinExecutionsLink.getHref(), TaskExecutionThinResource.Page.class); + } else { + return restTemplate.getForObject(executionsLink.getHref(), TaskExecutionThinResource.Page.class); + } + } + + @Override + public TaskExecutionThinResource.Page thinExecutionListByTaskName(String taskName) { + if(thinExecutionsByNameLink != null) { + return restTemplate.getForObject(thinExecutionsByNameLink.expand(taskName).getHref(), TaskExecutionThinResource.Page.class); + } else { + return restTemplate.getForObject(executionByNameLink.expand(taskName).getHref(), TaskExecutionThinResource.Page.class); + } + } + @Override public TaskExecutionResource.Page executionListByTaskName(String taskName) { - return restTemplate.getForObject(executionByNameLink.expand(taskName).getHref(), - TaskExecutionResource.Page.class); + return restTemplate.getForObject(executionByNameLink.expand(taskName).getHref(), TaskExecutionResource.Page.class); } @Override - public TaskExecutionResource taskExecutionStatus(long id) { - return restTemplate.getForObject(executionLink.expand(id).getHref(), TaskExecutionResource.class); + public TaskExecutionResource taskExecutionStatus(long id, String schemaTarget) { + MultiValueMap values = new LinkedMultiValueMap<>(); + values.add("id", id); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + String url = executionLink.expand(values).getHref(); + return 
restTemplate.getForObject(url, TaskExecutionResource.class); } @Override @@ -225,8 +332,8 @@ public String taskExecutionLog(String externalExecutionId) { @Override public String taskExecutionLog(String externalExecutionId, String platform) { - Map map = new HashMap<>(); - map.put("taskExternalExecutionId",externalExecutionId); + Map map = new HashMap<>(); + map.put("taskExternalExecutionId", externalExecutionId); map.put("platformName", platform); return restTemplate.getForObject(retrieveLogLink.expand(map).getHref(), String.class); } @@ -235,23 +342,31 @@ public String taskExecutionLog(String externalExecutionId, String platform) { public Collection currentTaskExecutions() { ParameterizedTypeReference> typeReference = new ParameterizedTypeReference>() { - }; + }; return restTemplate - .exchange(executionsCurrentLink.getHref(),HttpMethod.GET,null, typeReference).getBody(); + .exchange(executionsCurrentLink.getHref(), HttpMethod.GET, null, typeReference).getBody(); } @Override - public void cleanup(long id) { - cleanup(id, false); + public void cleanup(long id, String schemaTarget) { + cleanup(id, schemaTarget, false); } @Override - public void cleanup(long id, boolean removeData) { - String uriTemplate = executionLink.expand(id).getHref(); - if (removeData) { - uriTemplate = uriTemplate + "?action=CLEANUP,REMOVE_DATA"; - } - restTemplate.delete(uriTemplate); + public void cleanup(long id, String schemaTarget, boolean removeData) { + MultiValueMap values = new LinkedMultiValueMap<>(); + + String uriTemplate = executionLink.expand(id).getHref(); + + if (removeData) { + uriTemplate = uriTemplate + "?action=CLEANUP,REMOVE_DATA"; + } + + if (StringUtils.hasText(schemaTarget)) { + String schemaVal = (removeData) ? "&schemaTarget=" + schemaTarget : "?schemaTarget=" + schemaTarget; + uriTemplate = uriTemplate + schemaVal; + } + restTemplate.delete(uriTemplate); } @Override @@ -268,13 +383,13 @@ public void cleanupAllTaskExecutions(boolean completed, String taskName) { @Override public Integer getAllTaskExecutionsCount(boolean completed, String taskName) { - Map map = new HashMap<>(); + Map map = new HashMap<>(); map.put("completed", String.valueOf(completed)); map.put("name", StringUtils.hasText(taskName) ? 
taskName : ""); if (this.executionsInfoLink != null) { - return restTemplate - .getForObject(this.executionsInfoLink.expand(map).getHref(), TaskExecutionsInfoResource.class) - .getTotalExecutions(); + return Objects.requireNonNull( + restTemplate.getForObject(this.executionsInfoLink.expand(map).getHref(), TaskExecutionsInfoResource.class) + ).getTotalExecutions(); } // for backwards-compatibility return zero count return 0; @@ -283,10 +398,10 @@ public Integer getAllTaskExecutionsCount(boolean completed, String taskName) { @Override public TaskAppStatusResource validateTaskDefinition(String taskDefinitionName) - throws OperationNotSupportedException { + throws OperationNotSupportedException { if (validationLink == null) { throw new OperationNotSupportedException("Task Validation not supported on Data Flow Server version " - + dataFlowServerVersion); + + dataFlowServerVersion); } String uriTemplate = this.validationLink.expand(taskDefinitionName).getHref(); return restTemplate.getForObject(uriTemplate, TaskAppStatusResource.class); diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java index 0ddaf5b151..75cbf546ec 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.Map; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -78,7 +79,6 @@ public class DataFlowClientAutoConfiguration { @Autowired private DataFlowClientProperties properties; - @Autowired(required = false) private RestTemplate restTemplate; @Autowired @@ -90,9 +90,13 @@ public class DataFlowClientAutoConfiguration { @Autowired private @Nullable OAuth2ClientProperties oauth2ClientProperties; + public DataFlowClientAutoConfiguration(@Nullable RestTemplate restTemplate) { + this.restTemplate = restTemplate; + } + @Bean @ConditionalOnMissingBean(DataFlowOperations.class) - public DataFlowOperations dataFlowOperations() throws Exception{ + public DataFlowOperations dataFlowOperations(@Nullable ObjectMapper mapper) throws Exception{ RestTemplate template = DataFlowTemplate.prepareRestTemplate(restTemplate); final HttpClientConfigurer httpClientConfigurer = HttpClientConfigurer.create(new URI(properties.getServerUri())) .skipTlsCertificateVerification(properties.isSkipSslValidation()); @@ -127,7 +131,7 @@ else if (oauth2ClientProperties != null && !oauth2ClientProperties.getRegistrati logger.debug("Not configuring security for accessing the Data Flow Server"); } - return new DataFlowTemplate(new URI(properties.getServerUri()), template); + return new DataFlowTemplate(new URI(properties.getServerUri()), template, mapper); } @Bean diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java index 72a3f7278a..82dff20cf4 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java +++ 
b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java @@ -219,7 +219,7 @@ public String logs(StreamApplication app) { /** * @return Returns a map of the stream applications, associating every application with its applications instances - * and their current runtime states: (App -> (AppInstanceId -> AppInstanceState)). + * and their current runtime states: {@code (App -> (AppInstanceId -> AppInstanceState))}. */ public Map> runtimeApps() { diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java index cbd4c305ea..62785da44e 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java @@ -22,6 +22,7 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -29,7 +30,9 @@ import org.springframework.cloud.dataflow.rest.client.JobOperations; import org.springframework.cloud.dataflow.rest.client.TaskOperations; import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; +import org.springframework.cloud.dataflow.rest.resource.JobExecutionThinResource; import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; @@ -39,7 +42,7 @@ /** * Represents a Task defined on DataFlow server. New Task can be defined with the help of a fluent style builder * pattern or use the {@link Task} static utility methods to retrieve existing tasks already defined in DataFlow. - * + *

    * For instance you can define a new task like this: *

      *     {@code
    @@ -50,7 +53,7 @@
      *              .build();
      *     }
      * 
    - * + *

    * Next you can launch the task and inspect the execution results. Mind that the task is run asynchronously. *

      *     import org.awaitility.Awaitility;
    @@ -65,7 +68,7 @@
      *          task.executions().forEach( execution -> System.out.println(execution.getExitCode()));
      *     }
      * 
    - * + *

    * Use

    {@code close()}
    to destroy the task manually. Since tasks are auto-closeable you can use the * Java try-with-resources block instead: *
    @@ -82,7 +85,7 @@
      *          } // Task is destroyed.
      *     }
      * 
    - * + *

    * Use the {@link TaskBuilder#allTasks()} and {@link TaskBuilder#findByName(String)} * static helper methods to list or retrieve existing tasks defined in DataFlow. * @@ -90,8 +93,11 @@ */ public class Task implements AutoCloseable { private final String taskName; + private final TaskOperations taskOperations; + private final JobOperations jobOperations; + private final DataFlowOperations dataFlowOperations; Task(String taskName, DataFlowOperations dataFlowOperations) { @@ -107,6 +113,7 @@ public class Task implements AutoCloseable { /** * Fluent API method to create a {@link TaskBuilder}. + * * @param dataFlowOperations {@link DataFlowOperations} Data Flow Rest client instance. * @return A fluent style builder to create tasks. */ @@ -116,28 +123,31 @@ public static TaskBuilder builder(DataFlowOperations dataFlowOperations) { /** * Launch a task without properties or arguments. + * - * @return long containing the TaskExecutionId + * @return the {@link LaunchResponseResource} carrying the task execution id and its schema target */ - public long launch() { - return this.launch(Collections.EMPTY_LIST); + public LaunchResponseResource launch() { + return this.launch(Collections.emptyList()); } /** * Launch a task with command line arguments. + * * @param arguments the command line arguments. - * @return long containing the TaskExecutionId + * @return the {@link LaunchResponseResource} carrying the task execution id and its schema target */ - public long launch(List<String> arguments) { - return this.launch(Collections.EMPTY_MAP, arguments); + public LaunchResponseResource launch(List<String> arguments) { + return this.launch(Collections.emptyMap(), arguments); } /** * Launch a task with deployment properties and command line arguments. + * * @param properties the deployment properties. - * @param arguments the command line arguments. + * @param arguments the command line arguments. - * @return long containing the TaskExecutionId + * @return the {@link LaunchResponseResource} carrying the task execution id and its schema target */ - public long launch(Map<String, String> properties, List<String> arguments) { + public LaunchResponseResource launch(Map<String, String> properties, List<String> arguments) { if (properties == null) { throw new IllegalArgumentException("Task properties can't be null!"); } @@ -146,33 +156,36 @@ public long launch(Map<String, String> properties, List<String> arguments) { /** * Stop all Tasks' running {@link org.springframework.cloud.task.repository.TaskExecution}s. - * + *

    * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! */ public void stop() { - String commaSeparatedIds = executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getTaskExecutionStatus() == TaskExecutionStatus.RUNNING) - .map(TaskExecutionResource::getExecutionId) - .map(String::valueOf) + Map> idTargets = executions().stream() + .filter(Objects::nonNull) + .filter(e -> e.getTaskExecutionStatus() == TaskExecutionStatus.RUNNING) + .collect(Collectors.groupingBy(TaskExecutionResource::getSchemaTarget, Collectors.toSet())); + idTargets.forEach((schemaTarget, tasks) -> { + String ids = tasks.stream() + .map(taskExecutionResource -> String.valueOf(taskExecutionResource.getExecutionId())) .collect(Collectors.joining(",")); - if (StringUtils.hasText(commaSeparatedIds)) { - this.taskOperations.stop(commaSeparatedIds); - } + this.taskOperations.stop(ids, schemaTarget); + }); } /** * Stop a list of {@link org.springframework.cloud.task.repository.TaskExecution}s. - * @param taskExecutionIds List of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. * - * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! + * @param schemaTarget the schema target of the task executions. + * @param taskExecutionIds List of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + *

    + * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! */ - public void stop(long... taskExecutionIds) { + public void stop(String schemaTarget, long... taskExecutionIds) { String commaSeparatedIds = Stream.of(taskExecutionIds) - .map(String::valueOf) - .collect(Collectors.joining(",")); + .map(String::valueOf) + .collect(Collectors.joining(",")); if (StringUtils.hasText(commaSeparatedIds)) { - this.taskOperations.stop(commaSeparatedIds); + this.taskOperations.stop(commaSeparatedIds, schemaTarget); } } @@ -189,6 +202,7 @@ public void destroy() { /** * List task executions for this task. + * * @return List of task executions for the given task. */ public Collection executions() { @@ -197,37 +211,40 @@ public Collection executions() { /** * Retrieve task execution by Id. - * @param executionId Task execution Id + * + * @param executionId Task execution Id + * @param schemaTarget the schema target of the task execution. * @return Task executions for the given task execution id. */ - public Optional execution(long executionId) { - return this.executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getExecutionId() == executionId) - .findFirst(); + public Optional execution(long executionId, String schemaTarget) { + return Optional.ofNullable(this.taskOperations.taskExecutionStatus(executionId, schemaTarget)); } /** * Find {@link TaskExecutionResource} by a parent execution id. + * * @param parentExecutionId parent task execution id. + * @param schemaTarget the schema target of the parent execution. * @return Return TaskExecutionResource */ - public Optional executionByParentExecutionId(long parentExecutionId) { + public Optional executionByParentExecutionId(long parentExecutionId, String schemaTarget) { return this.executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getParentExecutionId() == parentExecutionId) - .findFirst(); + .filter(Objects::nonNull) + .filter(e -> e.getParentExecutionId() == parentExecutionId) + .findFirst(); } /** * Task execution status - * @param executionId execution Id + * + * @param executionId execution Id. + * @param schemaTarget the schema target of the execution. * @return returns the task execution status. */ - public TaskExecutionStatus executionStatus(long executionId) { - return this.execution(executionId) - .map(TaskExecutionResource::getTaskExecutionStatus) - .orElse(TaskExecutionStatus.UNKNOWN); + public TaskExecutionStatus executionStatus(long executionId, String schemaTarget) { + return this.execution(executionId, schemaTarget) + .map(TaskExecutionResource::getTaskExecutionStatus) + .orElse(TaskExecutionStatus.UNKNOWN); } /** @@ -242,12 +259,12 @@ public boolean isComposed() { */ public List composedTaskChildTasks() { return !isComposed() ? 
- new ArrayList<>() : - this.taskOperations.list().getContent().stream() - .filter(Objects::nonNull) - .filter(t -> t.getName().startsWith(this.taskName + "-")) - .map(t -> new Task(t.getName(), this.dataFlowOperations)) - .collect(Collectors.toList()); + new ArrayList<>() : + this.taskOperations.list().getContent().stream() + .filter(Objects::nonNull) + .filter(t -> t.getName().startsWith(this.taskName + "-")) + .map(t -> new Task(t.getName(), this.dataFlowOperations)) + .collect(Collectors.toList()); } /** @@ -256,7 +273,7 @@ public List composedTaskChildTasks() { */ public Optional composedTaskChildTaskByLabel(String childTaskLabel) { return this.composedTaskChildTasks().stream() - .filter(childTask -> childTask.getTaskName().endsWith("-" + childTaskLabel)).findFirst(); + .filter(childTask -> childTask.getTaskName().endsWith("-" + childTaskLabel)).findFirst(); } @@ -271,11 +288,17 @@ public Collection jobExecutionResources() { return this.jobOperations.executionListByJobName(this.taskName).getContent(); } + public Collection thinJobExecutionResources() { + return this.jobOperations.executionThinListByJobName(this.taskName).getContent(); + } + /** + * @param jobExecutionId the job execution id. + * @param schemaTarget the schema target of the job execution. * @return Returns list of {@link StepExecutionResource} belonging to the job. */ - public Collection jobStepExecutions(long jobExecutionId) { - return this.jobOperations.stepExecutionList(jobExecutionId).getContent(); + public Collection jobStepExecutions(long jobExecutionId, String schemaTarget) { + return this.jobOperations.stepExecutionList(jobExecutionId, schemaTarget).getContent(); } /** @@ -287,9 +310,9 @@ public Collection jobInstanceResources() { private Optional definitionResource() { return this.taskOperations.list().getContent().stream() - .filter(Objects::nonNull) - .filter(t -> t.getName().equals(this.taskName)) - .findFirst(); + .filter(Objects::nonNull) + .filter(t -> t.getName().equals(this.taskName)) + .findFirst(); } /** @@ -303,4 +326,37 @@ public String getTaskName() { public void close() { destroy(); } + + //-------------------------------------------------------------------------------------------------------- + // TASK EXECUTION CLEANUP + //-------------------------------------------------------------------------------------------------------- + + /** + * Remove the task execution with the specified task execution id. + * + * @param taskExecutionId the id of the task execution to be removed. + * @param schemaTarget the schema target + */ + public void cleanupTaskExecution(long taskExecutionId, String schemaTarget) { + this.taskOperations.cleanup(taskExecutionId, schemaTarget, true); + } + + /** + * Remove all task executions. + */ + public void cleanupAllTaskExecutions() { + this.taskOperations.cleanupAllTaskExecutions(false, null); + } + + /** + * Retrieve task executions for the child task name associated with this task's instance. + * + * @param childTaskName to be used to search for the associated task executions. + * @return List of task executions for the given child task. + */ + public Optional composedTaskChildExecution(String childTaskName) { + Collection taskExecutions = taskOperations.executionListByTaskName(this.taskName + "-" + childTaskName).getContent(); + return (taskExecutions.size() == 1) ? 
Optional.of((TaskExecutionResource) taskExecutions.stream().toArray()[0]) : Optional.empty(); + } + } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java index 121459f91f..1f673ad81f 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java @@ -17,8 +17,7 @@ import java.util.Collections; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.boot.SpringApplication; @@ -30,9 +29,10 @@ import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.web.client.RestTemplate; - +import static org.assertj.core.api.Assertions.assertThat; /** * @author Vinicius Carvalho + * @author Corneil du Plessis */ public class DataFlowClientAutoConfigurationTests { @@ -41,8 +41,8 @@ public void contextLoads() throws Exception { ConfigurableApplicationContext applicationContext = SpringApplication.run(TestApplication.class, "--spring.cloud.dataflow.client.enableDsl=true", "--spring.autoconfigure.exclude=org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration,org.springframework.cloud.deployer.spi.kubernetes.KubernetesAutoConfiguration"); - Assert.assertNotNull(applicationContext.getBean(DataFlowTemplate.class)); - Assert.assertNotNull(applicationContext.getBean(StreamBuilder.class)); + assertThat(applicationContext.getBean(DataFlowTemplate.class)).isNotNull(); + assertThat(applicationContext.getBean(StreamBuilder.class)).isNotNull(); RestTemplate template = applicationContext.getBean(RestTemplate.class); //No auth Mockito.verify(template, Mockito.times(0)).setRequestFactory(Mockito.any()); @@ -55,14 +55,14 @@ public void usingAuthentication() throws Exception { "--spring.cloud.dataflow.client.authentication.basic.username=foo", "--spring.cloud.dataflow.client.authentication.basic.password=bar", "--spring.autoconfigure.exclude=org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration,org.springframework.cloud.deployer.spi.kubernetes.KubernetesAutoConfiguration"); - Assert.assertNotNull(applicationContext.getBean(DataFlowTemplate.class)); - Assert.assertNotNull(applicationContext.getBean(StreamBuilder.class)); + assertThat(applicationContext.getBean(DataFlowTemplate.class)).isNotNull(); + assertThat(applicationContext.getBean(StreamBuilder.class)).isNotNull(); RestTemplate template = applicationContext.getBean(RestTemplate.class); DataFlowClientProperties properties = applicationContext.getBean(DataFlowClientProperties.class); - Assert.assertNotNull(properties.getAuthentication()); - Assert.assertEquals("foo", properties.getAuthentication().getBasic().getUsername()); - Assert.assertEquals("bar", properties.getAuthentication().getBasic().getPassword()); + assertThat(properties.getAuthentication()).isNotNull(); + assertThat(properties.getAuthentication().getBasic().getUsername()).isEqualTo("foo"); + assertThat(properties.getAuthentication().getBasic().getPassword()).isEqualTo("bar"); Mockito.verify(template, 
Mockito.times(1)).setRequestFactory(Mockito.any()); applicationContext.close(); } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java index 81d8eaac51..39fe2dd47b 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java @@ -15,17 +15,19 @@ */ package org.springframework.cloud.dataflow.rest.client; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.hateoas.Link; import org.springframework.hateoas.mediatype.vnderrors.VndErrors; import org.springframework.hateoas.mediatype.vnderrors.VndErrors.VndError; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + /** * @author Gunnar Hillert + * @author Corneil du Plessis */ public class DataflowClientExceptionTests { @@ -36,7 +38,7 @@ public void testCreationOfDataflowClientExceptionWithNullError() { new DataFlowClientException(null); } catch (IllegalArgumentException e) { - assertEquals("The provided vndErrors parameter must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided vndErrors parameter must not be null."); return; } @@ -45,18 +47,18 @@ public void testCreationOfDataflowClientExceptionWithNullError() { @Test public void testCreationOfDataflowClientExceptionWithSingleError() { - final VndErrors errors = new VndErrors("foo", "bar message", new Link("somewhere")); + final VndErrors errors = new VndErrors("foo", "bar message", Link.of("somewhere")); final DataFlowClientException dataFlowClientException = new DataFlowClientException(errors); - assertEquals("bar message", dataFlowClientException.getMessage()); + assertThat(dataFlowClientException.getMessage()).isEqualTo("bar message"); } @Test public void testCreationOfDataflowClientExceptionWithMultipleErrors() { - final VndError vndError1 = new VndError("foo logref", "foo message", new Link("foo link")); - final VndError vndError2 = new VndError("bar logref", "bar message", new Link("bar link")); + final VndError vndError1 = new VndError("foo logref", "foo message", Link.of("foo link")); + final VndError vndError2 = new VndError("bar logref", "bar message", Link.of("bar link")); final VndErrors errors = new VndErrors(vndError1, vndError2); final DataFlowClientException dataFlowClientException = new DataFlowClientException(errors); - assertEquals("foo message\nbar message", dataFlowClientException.getMessage()); + assertThat(dataFlowClientException.getMessage()).isEqualTo("foo message\nbar message"); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java index 9cf3fcc411..ee6c32f62f 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java @@ -23,9 
+23,11 @@ import java.util.Optional; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; @@ -37,18 +39,18 @@ import org.springframework.cloud.dataflow.rest.Version; import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; import org.springframework.cloud.dataflow.rest.resource.RootResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.hateoas.Link; import org.springframework.hateoas.LinkRelation; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestTemplate; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; @@ -57,15 +59,23 @@ /** * @author Gunnar Hillert * @author Glenn Renfro + * @author Corneil du Plessis */ public class DataflowTemplateTests { - @Before + private ObjectMapper mapper; + + @BeforeEach public void setup() { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); System.setProperty("sun.net.client.defaultConnectTimeout", String.valueOf(100)); } - @After + @AfterEach public void shutdown() { System.clearProperty("sun.net.client.defaultConnectTimeout"); } @@ -74,19 +84,21 @@ public void shutdown() { public void testDataFlowTemplateContructorWithNullUri() throws URISyntaxException { try { - new DataFlowTemplate(null); + new DataFlowTemplate(null, mapper); } catch (IllegalArgumentException e) { - assertEquals("The provided baseURI must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided baseURI must not be null."); return; } fail("Expected an IllegalArgumentException to be thrown."); } - @Test(expected = ResourceAccessException.class) + @Test public void testDataFlowTemplateContructorWithNonExistingUri() throws URISyntaxException { - new DataFlowTemplate(new URI("https://doesnotexist:1234")); + assertThatExceptionOfType(ResourceAccessException.class).isThrownBy(() -> { + new DataFlowTemplate(new URI("https://doesnotexist:1234"), mapper); + }); } @Test @@ -103,8 +115,7 @@ public void testPrepareObjectMapperWithNullObjectMapper() { fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The objectMapper must not be null.", e.getMessage()); - return; + 
assertThat(e.getMessage()).isEqualTo("The objectMapper must not be null."); } } @@ -112,8 +123,8 @@ public void testPrepareObjectMapperWithNullObjectMapper() { public void testThatDefaultDataflowRestTemplateContainsMixins() { final RestTemplate restTemplate = DataFlowTemplate.getDefaultDataflowRestTemplate(); - assertNotNull(restTemplate); - assertTrue(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler); + assertThat(restTemplate).isNotNull(); + assertThat(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler).isTrue(); assertCorrectMixins(restTemplate); @@ -138,14 +149,14 @@ private void assertCorrectMixins(RestTemplate restTemplate) { } private void assertCorrectMixins(ObjectMapper objectMapper) { - assertNotNull(objectMapper.findMixInClassFor(JobExecution.class)); - assertNotNull(objectMapper.findMixInClassFor(JobParameters.class)); - assertNotNull(objectMapper.findMixInClassFor(JobParameter.class)); - assertNotNull(objectMapper.findMixInClassFor(JobInstance.class)); - assertNotNull(objectMapper.findMixInClassFor(ExitStatus.class)); - assertNotNull(objectMapper.findMixInClassFor(StepExecution.class)); - assertNotNull(objectMapper.findMixInClassFor(ExecutionContext.class)); - assertNotNull(objectMapper.findMixInClassFor(StepExecutionHistory.class)); + assertThat(objectMapper.findMixInClassFor(JobExecution.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(JobParameters.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(JobParameter.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(JobInstance.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(ExitStatus.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(StepExecution.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(ExecutionContext.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(StepExecutionHistory.class)).isNotNull(); } @@ -153,8 +164,8 @@ private void assertCorrectMixins(ObjectMapper objectMapper) { public void testThatPrepareRestTemplateWithNullContructorValueContainsMixins() { final RestTemplate restTemplate = DataFlowTemplate.prepareRestTemplate(null); - assertNotNull(restTemplate); - assertTrue(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler); + assertThat(restTemplate).isNotNull(); + assertThat(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler).isTrue(); assertCorrectMixins(restTemplate); @@ -165,9 +176,9 @@ public void testThatPrepareRestTemplateWithProvidedRestTemplateContainsMixins() final RestTemplate providedRestTemplate = new RestTemplate(); final RestTemplate restTemplate = DataFlowTemplate.prepareRestTemplate(providedRestTemplate); - assertNotNull(restTemplate); - assertTrue(providedRestTemplate == restTemplate); - assertTrue(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler); + assertThat(restTemplate).isNotNull(); + assertThat(restTemplate).isSameAs(providedRestTemplate); + assertThat(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler).isTrue(); assertCorrectMixins(restTemplate); } @@ -181,7 +192,7 @@ public void testPrepareRestTemplateWithRestTemplateThatHasNoMessageConverters() DataFlowTemplate.prepareRestTemplate(providedRestTemplate); } catch (IllegalArgumentException e) { - assertEquals("'messageConverters' must not be empty", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("'messageConverters' must not be empty"); return; } @@ -203,8 +214,7 @@ public void 
testPrepareRestTemplateWithRestTemplateThatMissesJacksonConverter() DataFlowTemplate.prepareRestTemplate(providedRestTemplate); } catch (IllegalArgumentException e) { - assertEquals("The RestTemplate does not contain a required MappingJackson2HttpMessageConverter.", - e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The RestTemplate does not contain a required MappingJackson2HttpMessageConverter."); return; } @@ -215,11 +225,11 @@ public void testPrepareRestTemplateWithRestTemplateThatMissesJacksonConverter() public void testAllActive() throws Exception{ DataFlowTemplate template = getMockedDataFlowTemplate(true); - assertNotNull(template.taskOperations()); - assertNotNull(template.streamOperations()); - assertNotNull(template.runtimeOperations()); - assertNotNull(template.jobOperations()); - assertNotNull(template.schedulerOperations()); + assertThat(template.taskOperations()).isNotNull(); + assertThat(template.streamOperations()).isNotNull(); + assertThat(template.runtimeOperations()).isNotNull(); + assertThat(template.jobOperations()).isNotNull(); + assertThat(template.schedulerOperations()).isNotNull(); testAlwaysActiveOperations(template); } @@ -228,20 +238,20 @@ public void testAllActive() throws Exception{ public void testAllDeActive() throws Exception{ DataFlowTemplate template = getMockedDataFlowTemplate(false); - assertNull(template.taskOperations()); - assertNull(template.streamOperations()); - assertNull(template.runtimeOperations()); - assertNull(template.jobOperations()); - assertNull(template.schedulerOperations()); + assertThat(template.taskOperations()).isNull(); + assertThat(template.streamOperations()).isNull(); + assertThat(template.runtimeOperations()).isNull(); + assertThat(template.jobOperations()).isNull(); + assertThat(template.schedulerOperations()).isNull(); testAlwaysActiveOperations(template); } private void testAlwaysActiveOperations(DataFlowTemplate template) { //these operations are always active - assertNotNull(template.aboutOperation()); - assertNotNull(template.appRegistryOperations()); - assertNotNull(template.completionOperations()); + assertThat(template.aboutOperation()).isNotNull(); + assertThat(template.appRegistryOperations()).isNotNull(); + assertThat(template.completionOperations()).isNotNull(); } private DataFlowTemplate getMockedDataFlowTemplate(boolean isLinksActive) throws Exception{ @@ -259,6 +269,6 @@ private DataFlowTemplate getMockedDataFlowTemplate(boolean isLinksActive) throws converters.add(new MappingJackson2HttpMessageConverter()); when(restTemplate.getMessageConverters()).thenReturn(converters); URI uri = new URI("foo"); - return new DataFlowTemplate(uri, restTemplate); + return new DataFlowTemplate(uri, restTemplate, mapper); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java index a8e24d9af4..03c2a96bf5 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java @@ -21,20 +21,18 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import 
org.springframework.batch.item.ExecutionContext; import org.springframework.util.StreamUtils; -import static org.hamcrest.CoreMatchers.containsString; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.api.Assertions.within; /** * @author Gunnar Hillert + * @author Corneil du Plessis */ public class ExecutionContextDeserializationTests { @@ -51,11 +49,11 @@ public void testDeserializationOfBasicExecutionContext() throws IOException { ExecutionContext executionContext = objectMapper.readValue(json, new TypeReference() { }); - assertEquals(2, executionContext.entrySet().size()); - assertEquals("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1", executionContext.get("batch.taskletType")); - assertEquals("org.springframework.batch.core.step.tasklet.TaskletStep", executionContext.get("batch.stepType")); - assertFalse(executionContext.isDirty()); - assertFalse(executionContext.isEmpty()); + assertThat(executionContext.entrySet().size()).isEqualTo(2); + assertThat(executionContext.get("batch.taskletType")).isEqualTo("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1"); + assertThat(executionContext.get("batch.stepType")).isEqualTo("org.springframework.batch.core.step.tasklet.TaskletStep"); + assertThat(executionContext.isDirty()).isFalse(); + assertThat(executionContext.isEmpty()).isFalse(); } /** @@ -78,11 +76,11 @@ public void testFaultyExecutionContext() throws IOException { ExecutionContext executionContext = objectMapper.readValue(json, new TypeReference() { }); - assertEquals(2, executionContext.entrySet().size()); - assertEquals("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1", executionContext.get("batch.taskletType")); - assertEquals("org.springframework.batch.core.step.tasklet.TaskletStep", executionContext.get("batch.stepType")); - assertTrue(executionContext.isDirty()); - assertFalse(executionContext.isEmpty()); + assertThat(executionContext.entrySet().size()).isEqualTo(2); + assertThat(executionContext.get("batch.taskletType")).isEqualTo("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1"); + assertThat(executionContext.get("batch.stepType")).isEqualTo("org.springframework.batch.core.step.tasklet.TaskletStep"); + assertThat(executionContext.isDirty()).isTrue(); + assertThat(executionContext.isEmpty()).isFalse(); } @Test @@ -98,16 +96,16 @@ public void testExecutionContextWithNonStringValues() throws IOException { final ExecutionContext executionContext = objectMapper.readValue(json, new TypeReference() { }); - assertEquals(6, executionContext.entrySet().size()); - assertEquals(1234, executionContext.getInt("barNumber")); - assertEquals("1234", executionContext.getString("barNumberAsString")); + assertThat(executionContext.entrySet().size()).isEqualTo(6); + assertThat(executionContext.getInt("barNumber")).isEqualTo(1234); + assertThat(executionContext.getString("barNumberAsString")).isEqualTo("1234"); try { executionContext.getLong("barNumber"); fail("Expected a ClassCastException to be thrown."); } catch (ClassCastException ce) { - assertThat(ce.getMessage(), containsString("key=[barNumber] is not of type: [class java.lang.Long], it is 
[(class java.lang.Integer)")); + assertThat(ce.getMessage()).contains("key=[barNumber] is not of type: [class java.lang.Long], it is [(class java.lang.Integer)"); } try { @@ -115,24 +113,24 @@ public void testExecutionContextWithNonStringValues() throws IOException { fail("Expected a ClassCastException to be thrown."); } catch (ClassCastException ce) { - assertThat(ce.getMessage(), containsString("key=[barNumber] is not of type: [class java.lang.Double], it is [(class java.lang.Integer)")); + assertThat(ce.getMessage()).contains("key=[barNumber] is not of type: [class java.lang.Double], it is [(class java.lang.Integer)"); } - assertEquals(22222222222L, executionContext.getLong("longNumber")); + assertThat(executionContext.getLong("longNumber")).isEqualTo(22222222222L); try { executionContext.getInt("longNumber"); fail("Expected a ClassCastException to be thrown."); } catch (ClassCastException ce) { - assertThat(ce.getMessage(), containsString("key=[longNumber] is not of type: [class java.lang.Integer], it is [(class java.lang.Long)")); + assertThat(ce.getMessage()).contains("key=[longNumber] is not of type: [class java.lang.Integer], it is [(class java.lang.Long)"); } - assertEquals("true", executionContext.get("fooBoolean")); - assertEquals(3.5, executionContext.getDouble("floatNumber"), 0.1); - assertEquals("[1,2,3]", executionContext.getString("floatNumberArray")); + assertThat(executionContext.get("fooBoolean")).isEqualTo("true"); + assertThat(executionContext.getDouble("floatNumber")).isCloseTo(3.5, within(0.1)); + assertThat(executionContext.getString("floatNumberArray")).isEqualTo("[1,2,3]"); - assertFalse(executionContext.isDirty()); - assertFalse(executionContext.isEmpty()); + assertThat(executionContext.isDirty()).isFalse(); + assertThat(executionContext.isEmpty()).isFalse(); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java index 899aa9ca6a..aa708c2ad8 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java @@ -19,15 +19,16 @@ import java.io.IOException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.item.ExecutionContext; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Gunnar Hillert * @author Glenn Renfro + * @author Corneil du Plessis */ public class ExecutionContextSerializationTests { @@ -41,7 +42,7 @@ public void testSerializationOfExecutionContext() throws IOException { final String serializedExecutionContext = objectMapper.writeValueAsString(stepExecutionExecutionContext); final String expectedExecutionContext = "{\"dirty\":true,\"empty\":false,\"values\":[{\"foo\":\"bar\"},{\"foo2\":\"bar2\"}]}"; - assertEquals(expectedExecutionContext, serializedExecutionContext); + assertThat(serializedExecutionContext).isEqualTo(expectedExecutionContext); } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java 
b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java index 5fad4dc1ed..0ed9848ddb 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.StepExecution; import org.springframework.batch.item.ExecutionContext; @@ -30,12 +30,12 @@ import org.springframework.hateoas.PagedModel; import org.springframework.util.StreamUtils; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Gunnar Hillert * @author Glenn Renfro + * @author Corneil du Plessis */ public class JobExecutionDeserializationTests { @@ -53,10 +53,10 @@ public void testDeserializationOfMultipleJobExecutions() throws IOException { new TypeReference>>() { }); final JobExecutionResource jobExecutionResource = paged.getContent().iterator().next().getContent(); - assertEquals("Expect 1 JobExecutionInfoResource", 6, paged.getContent().size()); - assertEquals(Long.valueOf(6), jobExecutionResource.getJobId()); - assertEquals("job200616815", jobExecutionResource.getName()); - assertEquals("COMPLETED", jobExecutionResource.getJobExecution().getStatus().name()); + assertThat(paged.getContent().size()).as("Expect 1 JobExecutionInfoResource").isEqualTo(6); + assertThat(jobExecutionResource.getJobId()).isEqualTo(Long.valueOf(6)); + assertThat(jobExecutionResource.getName()).isEqualTo("job200616815"); + assertThat(jobExecutionResource.getJobExecution().getStatus().name()).isEqualTo("COMPLETED"); } @Test @@ -71,19 +71,19 @@ public void testDeserializationOfSingleJobExecution() throws IOException { final JobExecutionResource jobExecutionInfoResource = objectMapper.readValue(json, JobExecutionResource.class); - assertNotNull(jobExecutionInfoResource); - assertEquals(Long.valueOf(1), jobExecutionInfoResource.getJobId()); - assertEquals("ff.job", jobExecutionInfoResource.getName()); - assertEquals("COMPLETED", jobExecutionInfoResource.getJobExecution().getStatus().name()); - assertEquals(1, jobExecutionInfoResource.getJobExecution().getStepExecutions().size()); + assertThat(jobExecutionInfoResource).isNotNull(); + assertThat(jobExecutionInfoResource.getJobId()).isEqualTo(Long.valueOf(1)); + assertThat(jobExecutionInfoResource.getName()).isEqualTo("ff.job"); + assertThat(jobExecutionInfoResource.getJobExecution().getStatus().name()).isEqualTo("COMPLETED"); + assertThat(jobExecutionInfoResource.getJobExecution().getStepExecutions().size()).isEqualTo(1); final StepExecution stepExecution = jobExecutionInfoResource.getJobExecution().getStepExecutions().iterator().next(); - assertNotNull(stepExecution); + assertThat(stepExecution).isNotNull(); final ExecutionContext stepExecutionExecutionContext = stepExecution.getExecutionContext(); - assertNotNull(stepExecutionExecutionContext); - assertEquals(2, stepExecutionExecutionContext.size()); + assertThat(stepExecutionExecutionContext).isNotNull(); + assertThat(stepExecutionExecutionContext.size()).isEqualTo(2); } } diff --git 
a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplateTests.java new file mode 100644 index 0000000000..3ca2db3b37 --- /dev/null +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplateTests.java @@ -0,0 +1,146 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.rest.client; + +import java.util.Collections; +import java.util.Map; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.skipper.domain.ActuatorPostRequest; +import org.springframework.hateoas.Link; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.web.client.RestTemplate; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit tests for {@link RuntimeTemplate}. + * + * @author Chris Bono + */ +class RuntimeTemplateTests { + + private RuntimeTemplate runtimeTemplate; + + private RestTemplate restTemplate; + + private RepresentationModel resources; + + private final String appId = "flipflop3.log-v1"; + + private final String instanceId = "flipflop3.log-v1-0"; + + private final String endpoint = "info"; + + @BeforeEach + void prepareUriTemplate() { + Link actuatorGetLink = mock(Link.class); + when(actuatorGetLink.getHref()).thenReturn("actuator-get-link"); + + Link actuatorPostLink = mock(Link.class); + when(actuatorPostLink.getHref()).thenReturn("actuator-post-link"); + + Link actuatorLink = mock(Link.class); + when(actuatorLink.expand(appId, instanceId, endpoint)).thenReturn(actuatorGetLink); + when(actuatorLink.expand(appId, instanceId)).thenReturn(actuatorPostLink); + + resources = mock(RepresentationModel.class); + when(resources.getLink("runtime/apps")).thenReturn(Optional.of(mock(Link.class))); + when(resources.getLink("runtime/apps/{appId}")).thenReturn(Optional.of(mock(Link.class))); + when(resources.getLink("runtime/apps/{appId}/instances/{instanceId}/actuator")).thenReturn(Optional.of(actuatorLink)); + when(resources.getLink("runtime/streams/{streamNames}")).thenReturn(Optional.of(mock(Link.class))); + + restTemplate = mock(RestTemplate.class); + runtimeTemplate = new RuntimeTemplate(restTemplate, resources); + + // Test Premise: + // Mocks are constructed in manner that ensures only requests for our chosen appId/instanceId/endpoint will + // result in a non-null answer to 'Link.getHref' (which is then passed into the RestTemplate). 
+ } + + @Test + void getFromActuator() { + runtimeTemplate.getFromActuator(appId, instanceId, endpoint); + verify(restTemplate).getForObject("actuator-get-link", String.class); + } + + @Test + void postToActuatorWithBodyMap() { + Map body = Collections.singletonMap("name", "extra"); + ActuatorPostRequest expectedPostRequest = new ActuatorPostRequest(); + expectedPostRequest.setEndpoint(endpoint); + expectedPostRequest.setBody(body); + runtimeTemplate.postToActuator(appId, instanceId, endpoint, body); + verify(restTemplate).postForObject(eq("actuator-post-link"), eq(expectedPostRequest), eq(Object.class)); + } + + @Test + void postToActuatorWithEmptyBodyMap() { + ActuatorPostRequest expectedPostRequest = new ActuatorPostRequest(); + expectedPostRequest.setEndpoint(endpoint); + expectedPostRequest.setBody(Collections.emptyMap()); + runtimeTemplate.postToActuator(appId, instanceId, endpoint, Collections.emptyMap()); + verify(restTemplate).postForObject(eq("actuator-post-link"), eq(expectedPostRequest), eq(Object.class)); + } + + @Test + void postToActuatorWithNullBodyMap() { + ActuatorPostRequest expectedPostRequest = new ActuatorPostRequest(); + expectedPostRequest.setEndpoint(endpoint); + runtimeTemplate.postToActuator(appId, instanceId, endpoint, null); + verify(restTemplate).postForObject(eq("actuator-post-link"), eq(expectedPostRequest), eq(Object.class)); + } + + @Test + void appStatusesUriTemplateIsRequired() { + when(resources.getLink("runtime/apps")).thenReturn(Optional.empty()); + assertThatThrownBy(() -> new RuntimeTemplate(restTemplate, resources)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Unable to retrieve URI template for runtime/apps"); + } + + @Test + void appStatusUriTemplateIsRequired() { + when(resources.getLink("runtime/apps/{appId}")).thenReturn(Optional.empty()); + assertThatThrownBy(() -> new RuntimeTemplate(restTemplate, resources)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Unable to retrieve URI template for runtime/apps/{appId}"); + } + + @Test + void streamStatusUriTemplateIsRequired() { + when(resources.getLink("runtime/streams/{streamNames}")).thenReturn(Optional.empty()); + assertThatThrownBy(() -> new RuntimeTemplate(restTemplate, resources)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Unable to retrieve URI template for runtime/streams/{streamNames}"); + } + + @Test + void actuatorUriTemplateIsNotRequiredForBackwardsCompatibility() { + when(resources.getLink("runtime/apps/{appId}/instances/{instanceId}/actuator")).thenReturn(Optional.empty()); + RuntimeTemplate runtimeTemplate = new RuntimeTemplate(restTemplate, resources); + assertThat(runtimeTemplate).hasFieldOrPropertyWithValue("appActuatorUriTemplate", null); + } +} diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java index a0ab353b83..80b6e8c9a4 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.Optional; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import 
org.springframework.cloud.dataflow.rest.resource.RootResource; @@ -38,6 +38,7 @@ /** * @author Glenn Renfro + * @author Corneil du Plessis */ public class SchedulerTemplateTests { private static final String SCHEDULES_RELATION = org.springframework.cloud.dataflow.rest.client.SchedulerTemplate.SCHEDULES_RELATION; @@ -49,11 +50,11 @@ public class SchedulerTemplateTests { private RestTemplate restTemplate; private SchedulerTemplate template; - @Before + @BeforeEach public void setup() { rootResource = mock(RootResource.class); - when(rootResource.getLink(SCHEDULES_RELATION)).thenReturn(Optional.of(new Link(SCHEDULES_RELATION))); - when(rootResource.getLink(SCHEDULES_RELATION_INSTANCE)).thenReturn(Optional.of(new Link(SCHEDULES_RELATION_INSTANCE))); + when(rootResource.getLink(SCHEDULES_RELATION)).thenReturn(Optional.of(Link.of(SCHEDULES_RELATION))); + when(rootResource.getLink(SCHEDULES_RELATION_INSTANCE)).thenReturn(Optional.of(Link.of(SCHEDULES_RELATION_INSTANCE))); restTemplate = mock(RestTemplate.class); template = new SchedulerTemplate(restTemplate, rootResource); } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java index 8120e4344e..003cf55c9f 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java @@ -20,20 +20,21 @@ import java.util.Map; import java.util.Optional; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.hateoas.Link; import org.springframework.hateoas.RepresentationModel; import org.springframework.web.client.RestTemplate; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; /** * Test the {@link TaskTemplate} implementation of {@link TaskOperations}. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ public class TaskTemplateTests { @@ -41,10 +42,9 @@ public class TaskTemplateTests { private RestTemplate restTemplate; - @Before + @BeforeEach public void setup() { restTemplate = mock(RestTemplate.class); - } @Test @@ -54,33 +54,32 @@ public void testOldDataFlow() { @Test public void testMinDataFlow() { - validateExecutionLinkPresent("1.7.0"); + validateExecutionLinkPresent("2.10.0"); } @Test public void testFutureDataFlow() { - validateExecutionLinkPresent("1.8.0"); - validateExecutionLinkPresent("1.9.0"); - validateExecutionLinkPresent("2.0.0"); + validateExecutionLinkPresent("2.11.6"); } private void validateExecutionLinkPresent(String dataFlowVersion) { TestResource testResource = new TestResource(); new TaskTemplate(this.restTemplate, testResource, dataFlowVersion); - Assert.assertTrue(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)); + assertThat(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)).isTrue(); } private void validateExecutionLinkNotPresent(String version) { TestResource testResource = new TestResource(); new TaskTemplate(this.restTemplate, testResource, version); - Assert.assertFalse(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)); + assertThat(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)).isFalse(); } - public static class TestResource extends RepresentationModel { + public static class TestResource extends RepresentationModel { - private Map linksRequested = new HashMap<>(); + private final Map linksRequested = new HashMap<>(); + @Override public Optional getLink(String rel) { if (this.linksRequested.containsKey(rel)) { Long count = this.linksRequested.get(rel); @@ -90,16 +89,13 @@ public Optional getLink(String rel) { this.linksRequested.put(rel, 1L); } - return Optional.of(new Link("foo", "bar")); + return Optional.of(Link.of("foo", "bar")); } public boolean isLinkRequested(String linkName) { - boolean result = false; + boolean result = this.linksRequested.containsKey(linkName) && + this.linksRequested.get(linkName) > 1L; - if (this.linksRequested.containsKey(linkName) && - this.linksRequested.get(linkName) > 1L) { - result = true; - } return result; } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java index c43871a4d6..1778c718e1 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java @@ -15,7 +15,7 @@ */ package org.springframework.cloud.dataflow.rest.client; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.rest.client.support.VersionUtils; @@ -23,6 +23,7 @@ /** * @author Mark Pollack + * @author Corneil du Plessis */ public class VersionUtilsTests { diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java index def6ed7f98..d87b76e5c0 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java +++ 
b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java @@ -89,7 +89,7 @@ public void testLegacyOauth() { assertThat(properties.getAuthentication().getClientId()).isEqualTo("id1"); assertThat(properties.getAuthentication().getClientSecret()).isEqualTo("secret1"); assertThat(properties.getAuthentication().getTokenUri()).isEqualTo("uri1"); - assertThat(properties.getAuthentication().getScope()).containsExactly("s1", "s2"); + assertThat(properties.getAuthentication().getScope()).containsExactlyInAnyOrder("s1", "s2"); }); } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java index 7fe03c11ca..97b83bd144 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java @@ -19,8 +19,8 @@ import java.util.Collections; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -41,6 +41,7 @@ import org.springframework.hateoas.PagedModel; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.anyMap; import static org.mockito.Mockito.anyString; @@ -55,6 +56,7 @@ /** * @author Vinicius Carvalho * @author Christian Tzolov + * @author Corneil du Plessis */ @SuppressWarnings("unchecked") public class StreamDslTests { @@ -68,13 +70,13 @@ public class StreamDslTests { @Mock private RuntimeOperations runtimeOperations; - private StreamApplication timeApplication = new StreamApplication("time"); + private final StreamApplication timeApplication = new StreamApplication("time"); - private StreamApplication filterApplication = new StreamApplication("filter"); + private final StreamApplication filterApplication = new StreamApplication("filter"); - private StreamApplication logApplication = new StreamApplication("log"); + private final StreamApplication logApplication = new StreamApplication("log"); - @Before + @BeforeEach public void init() { MockitoAnnotations.initMocks(this); when(client.streamOperations()).thenReturn(this.streamOperations); @@ -233,11 +235,13 @@ public void testDuplicateNameWithLabel() { eq(false)); } - @Test(expected = IllegalStateException.class) + @Test public void testDuplicateNameNoLabel() { - Stream.builder(client).name("test").source(timeApplication) + assertThatThrownBy(()-> { + Stream.builder(client).name("test").source(timeApplication) .processor(filterApplication).processor(filterApplication) .sink(logApplication).create(); + }).isInstanceOf(IllegalStateException.class); } @Test @@ -277,12 +281,12 @@ public void logs() { AppStatusResource appStatusResource = new AppStatusResource("deploymentId", "deployed"); - appStatusResource.setInstances(new CollectionModel(Arrays.asList(new AppInstanceStatusResource("instanceId", "deployed", - Collections.singletonMap(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME, "log"))))); - streamStatusResource.setApplications(new 
CollectionModel<>(Arrays.asList(appStatusResource))); + appStatusResource.setInstances(CollectionModel.of(Collections.singletonList(new AppInstanceStatusResource("instanceId", "deployed", + Collections.singletonMap(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME, "log"))))); + streamStatusResource.setApplications(CollectionModel.of(Collections.singletonList(appStatusResource))); when(runtimeOperations.streamStatus(ticktockDefinition.getName())) - .thenReturn(new PagedModel(Arrays.asList(streamStatusResource), null)); + .thenReturn(PagedModel.of(Collections.singletonList(streamStatusResource), (PagedModel.PageMetadata) null)); Stream stream = Stream.builder(client).name(ticktockDefinition.getName()).description("demo stream") .definition(ticktockDefinition.getDslText()).create() diff --git a/spring-cloud-dataflow-rest-resource/pom.xml b/spring-cloud-dataflow-rest-resource/pom.xml index cf01980152..c7a59fff25 100644 --- a/spring-cloud-dataflow-rest-resource/pom.xml +++ b/spring-cloud-dataflow-rest-resource/pom.xml @@ -1,13 +1,21 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-rest-resource + spring-cloud-dataflow-rest-resource + Data Flow Rest Resource jar + + true + 3.4.1 + org.springframework.boot @@ -23,6 +31,7 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.hateoas @@ -59,21 +68,11 @@ commons-io commons-io + 2.11.0 - junit - junit - test - - - org.hamcrest - hamcrest-core - test - - - org.hamcrest - hamcrest-library - test + org.codehaus.jettison + jettison org.springframework.boot @@ -83,6 +82,53 @@ org.springframework.cloud spring-cloud-skipper + ${project.version} + + + com.jayway.jsonpath + json-path + test - + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java index b429c8b2f0..04525cbb2d 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java @@ -21,6 +21,8 @@ import org.springframework.batch.core.StepExecution; /** + * Stores the cumulative information for a specific {@link StepExecution}'s history. + * * @author Glenn Renfro */ public class StepExecutionHistory { @@ -79,14 +81,22 @@ public String getStepName() { return stepName; } + /** + * Returns the number of {@link StepExecution}s being used for history calculations. + * + * @return the number of {@link StepExecution}s. 
+ */ public int getCount() { return count; } + @Deprecated public CumulativeHistory getCommitCount() { return commitCount; } + @Deprecated public CumulativeHistory getRollbackCount() { return rollbackCount; } @@ -95,30 +105,40 @@ public CumulativeHistory getReadCount() { return readCount; } + @Deprecated public CumulativeHistory getWriteCount() { return writeCount; } + @Deprecated public CumulativeHistory getFilterCount() { return filterCount; } + @Deprecated public CumulativeHistory getReadSkipCount() { return readSkipCount; } + @Deprecated public CumulativeHistory getWriteSkipCount() { return writeSkipCount; } + @Deprecated public CumulativeHistory getProcessSkipCount() { return processSkipCount; } + /** + * Stores the cumulative history for a specified {@link StepExecution}'s duration. + * @return {@link CumulativeHistory} for the duration of a specified {@link StepExecution}. + */ public CumulativeHistory getDuration() { return duration; } + @Deprecated public CumulativeHistory getDurationPerRead() { return durationPerRead; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java index 2c029d36da..0c2666e7ff 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java @@ -33,16 +33,19 @@ public class TaskJobExecution { private final int stepExecutionCount; - public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined) { - this(taskId, jobExecution, isTaskDefined, 0); + private final String schemaTarget; + + public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined, String schemaTarget) { + this(taskId, jobExecution, isTaskDefined, 0, schemaTarget); } - public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined, int stepExecutionCount) { + public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined, int stepExecutionCount, String schemaTarget) { Assert.notNull(jobExecution, "jobExecution must not be null"); this.taskId = taskId; this.jobExecution = jobExecution; this.isTaskDefined = isTaskDefined; this.stepExecutionCount = stepExecutionCount; + this.schemaTarget = schemaTarget; } /** @@ -74,4 +77,19 @@ public boolean isTaskDefined() { public int getStepExecutionCount() { return stepExecutionCount; } + + public String getSchemaTarget() { + return schemaTarget; + } + + @Override + public String toString() { + return "TaskJobExecution{" + + "taskId=" + taskId + + ", isTaskDefined=" + isTaskDefined + + ", jobExecution=" + jobExecution + + ", stepExecutionCount=" + stepExecutionCount + + ", schemaTarget='" + schemaTarget + '\'' + + '}'; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java index e8a8c0b837..7c08746f4a 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java @@ -20,7 +20,9 @@ import 
java.util.Collections; import java.util.List; + import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.util.Assert; @@ -32,7 +34,7 @@ */ public class TaskJobExecutionRel { - private final TaskExecution taskExecution; + private final AggregateTaskExecution taskExecution; private final List jobExecutionIds; @@ -49,7 +51,7 @@ public class TaskJobExecutionRel { * @param taskManifest to be associated with the task execution. * @param composedTaskJobExecution to be associated with the task execution. */ - public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionIds, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { + public TaskJobExecutionRel(AggregateTaskExecution taskExecution, List jobExecutionIds, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); this.taskExecution = taskExecution; this.taskManifest = taskManifest; @@ -66,7 +68,7 @@ public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionI /** * @return the taskExecution for this relationship. */ - public TaskExecution getTaskExecution() { + public AggregateTaskExecution getTaskExecution() { return taskExecution; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java index 5c0da365bf..e224c73cac 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java @@ -16,8 +16,9 @@ package org.springframework.cloud.dataflow.rest.resource; -import java.util.HashSet; +import java.util.Set; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; @@ -28,6 +29,7 @@ * @author Mark Fisher * @author Patrick Peralta * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class AppRegistrationResource extends RepresentationModel { @@ -56,10 +58,14 @@ public class AppRegistrationResource extends RepresentationModel versions; + private Set versions; /** * The label name of the application. 
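With the extra constructor argument above, every TaskJobExecution now records which schema target owns its task id. A short sketch of constructing one under the new five-argument signature (the "boot2" target name is an assumption based on the naming scheme used elsewhere in this change):

import org.springframework.batch.core.JobExecution;
import org.springframework.cloud.dataflow.rest.job.TaskJobExecution;

public class TaskJobExecutionSketch {
    public static void main(String[] args) {
        JobExecution jobExecution = new JobExecution(1L);
        // taskId, jobExecution, isTaskDefined, stepExecutionCount, schemaTarget
        TaskJobExecution exec = new TaskJobExecution(42L, jobExecution, true, 3, "boot2");
        // The new toString() includes the schema target for easier tracing.
        System.out.println(exec);
    }
}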
@@ -73,7 +79,7 @@ protected AppRegistrationResource() { } public AppRegistrationResource(String name, String type, String uri) { - this(name, type, null, uri, false); + this(name, type, null, uri, null, false); } /** @@ -83,13 +89,15 @@ public AppRegistrationResource(String name, String type, String uri) { * @param type app type * @param version app version * @param uri uri for app resource + * @param bootVersion Spring Boot version of the application * @param defaultVersion is this application selected to be the default version in DSL */ - public AppRegistrationResource(String name, String type, String version, String uri, Boolean defaultVersion) { + public AppRegistrationResource(String name, String type, String version, String uri, AppBootSchemaVersion bootVersion, Boolean defaultVersion) { this.name = name; this.type = type; this.version = version; this.uri = uri; + this.bootVersion = bootVersion; this.defaultVersion = defaultVersion; } @@ -100,14 +108,16 @@ public AppRegistrationResource(String name, String type, String version, String * @param type app type * @param version app version * @param uri uri for app resource + * @param bootVersion Spring Boot version of the application * @param defaultVersion is this application selected to be the default version in DSL * @param versions all the registered versions of this application */ - public AppRegistrationResource(String name, String type, String version, String uri, Boolean defaultVersion, HashSet<String> versions) { + public AppRegistrationResource(String name, String type, String version, String uri, AppBootSchemaVersion bootVersion, Boolean defaultVersion, Set<String> versions) { this.name = name; this.type = type; this.version = version; this.uri = uri; + this.bootVersion = bootVersion; this.defaultVersion = defaultVersion; this.versions = versions; } @@ -119,21 +129,22 @@ public AppRegistrationResource(String name, String type, String version, String * @param type app type * @param version app version * @param uri uri for app resource + * @param bootVersion Spring Boot version of the application * @param defaultVersion is this application selected to be the default version in DSL * @param versions all the registered versions of this application * @param label the label name of the application */ - public AppRegistrationResource(String name, String type, String version, String uri, Boolean defaultVersion, HashSet<String> versions, String label) { + public AppRegistrationResource(String name, String type, String version, String uri, AppBootSchemaVersion bootVersion, Boolean defaultVersion, Set<String> versions, String label) { this.name = name; this.type = type; this.version = version; this.uri = uri; + this.bootVersion = bootVersion; this.defaultVersion = defaultVersion; this.versions = versions; this.label = label; } - /** * @return the name of the app */ @@ -162,6 +173,10 @@ public String getVersion() { return version; } + public AppBootSchemaVersion getBootVersion() { + return bootVersion != null ? 
bootVersion : AppBootSchemaVersion.defaultVersion(); + } + /** * @return if this app selected to be the default */ @@ -172,7 +187,7 @@ public Boolean getDefaultVersion() { /** * @return all the available versions of the app */ - public HashSet getVersions() { + public Set getVersions() { return this.versions; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java index b920471d79..f420a12fd0 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java @@ -24,6 +24,7 @@ import java.util.Set; import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.PagedModel; /** @@ -75,10 +76,11 @@ protected DetailedAppRegistrationResource() { * @param type application type * @param version application version * @param coordinates Maven coordinates for the application artifact + * @param bootVersion Spring Boot version of the application. * @param isDefault is this the default app */ - public DetailedAppRegistrationResource(String name, String type, String version, String coordinates, Boolean isDefault) { - super(name, type, version, coordinates, isDefault); + public DetailedAppRegistrationResource(String name, String type, String version, String coordinates, AppBootSchemaVersion bootVersion, Boolean isDefault) { + super(name, type, version, coordinates, bootVersion, isDefault); } /** @@ -89,7 +91,7 @@ public DetailedAppRegistrationResource(String name, String type, String version, * data */ public DetailedAppRegistrationResource(AppRegistrationResource resource) { - super(resource.getName(), resource.getType(), resource.getVersion(), resource.getUri(), resource.getDefaultVersion()); + super(resource.getName(), resource.getType(), resource.getVersion(), resource.getUri(), resource.getBootVersion(), resource.getDefaultVersion()); } /** diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java index cc5dd8038d..605342983c 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
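The null-tolerant getter above means applications registered before the schema work still resolve to a usable boot version. A sketch of both paths (the URI and version strings are invented, and AppBootSchemaVersion.BOOT3 is an assumed constant of the enum introduced by this change):

import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource;
import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;

public class BootVersionFallbackSketch {
    public static void main(String[] args) {
        AppRegistrationResource boot3App = new AppRegistrationResource(
                "timestamp", "task", "1.0.0", "docker:example/timestamp:1.0.0",
                AppBootSchemaVersion.BOOT3, true);
        System.out.println(boot3App.getBootVersion()); // the registered boot version

        // The three-arg constructor passes null for bootVersion...
        AppRegistrationResource legacyApp =
                new AppRegistrationResource("time", "source", "docker:example/time:1.0.0");
        // ...so the getter falls back to AppBootSchemaVersion.defaultVersion().
        System.out.println(legacyApp.getBootVersion());
    }
}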
@@ -41,6 +41,7 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class JobExecutionResource extends RepresentationModel { @@ -86,6 +87,8 @@ public class JobExecutionResource extends RepresentationModel { + private long executionId; + private String schemaTarget; + + public LaunchResponseResource() { + } + + public LaunchResponseResource(long executionId, String schemaTarget) { + this.executionId = executionId; + this.schemaTarget = schemaTarget; + } + + public long getExecutionId() { + return executionId; + } + + public String getSchemaTarget() { + return schemaTarget; + } + + public void setExecutionId(long executionId) { + this.executionId = executionId; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java new file mode 100644 index 0000000000..022037eca4 --- /dev/null +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java @@ -0,0 +1,83 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
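LaunchResponseResource is the payload a task launch answers with: the execution id alone is no longer enough to locate an execution, so its schema target travels with it. A sketch of how a client might keep the pair together (the endpoint shape and "boot3" name are illustrative assumptions, not taken from this diff):

import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;

public class LaunchResponseSketch {
    public static void main(String[] args) {
        LaunchResponseResource response = new LaunchResponseResource(1001L, "boot3");
        // Hypothetical follow-up request for the execution's details.
        String query = String.format("/tasks/executions/%d?schemaTarget=%s",
                response.getExecutionId(), response.getSchemaTarget());
        System.out.println(query);
    }
}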
+ */ + +package org.springframework.cloud.dataflow.rest.resource; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.hateoas.RepresentationModel; + +/** + * Resource for {@link org.springframework.cloud.dataflow.schema.SchemaVersionTarget} + * @author Corneil du Plessis + */ +public class SchemaVersionTargetResource extends RepresentationModel { + private String name; + private AppBootSchemaVersion schemaVersion; + private String taskPrefix; + private String batchPrefix; + private String datasource; + + public SchemaVersionTargetResource() { + } + + public SchemaVersionTargetResource(String name, AppBootSchemaVersion schemaVersion, String taskPrefix, String batchPrefix, String datasource) { + this.name = name; + this.schemaVersion = schemaVersion; + this.taskPrefix = taskPrefix; + this.batchPrefix = batchPrefix; + this.datasource = datasource; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public AppBootSchemaVersion getSchemaVersion() { + return schemaVersion; + } + + public void setSchemaVersion(AppBootSchemaVersion schemaVersion) { + this.schemaVersion = schemaVersion; + } + + public String getTaskPrefix() { + return taskPrefix; + } + + public void setTaskPrefix(String taskPrefix) { + this.taskPrefix = taskPrefix; + } + + public String getBatchPrefix() { + return batchPrefix; + } + + public void setBatchPrefix(String batchPrefix) { + this.batchPrefix = batchPrefix; + } + + public String getDatasource() { + return datasource; + } + + public void setDatasource(String datasource) { + this.datasource = datasource; + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java new file mode 100644 index 0000000000..8dd4d146f8 --- /dev/null +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.rest.resource; + +import java.util.List; + +import org.springframework.hateoas.RepresentationModel; + +/** + * Resource for {@link org.springframework.cloud.dataflow.schema.SchemaVersionTargets} + * @author Corneil du Plessis + */ +public class SchemaVersionTargetsResource extends RepresentationModel { + private String defaultSchemaTarget; + + private List schemas; + + public SchemaVersionTargetsResource() { + } + + public SchemaVersionTargetsResource(String defaultSchemaTarget, List schemas) { + this.defaultSchemaTarget = defaultSchemaTarget; + this.schemas = schemas; + } + + public String getDefaultSchemaTarget() { + return defaultSchemaTarget; + } + + public void setDefaultSchemaTarget(String defaultSchemaTarget) { + this.defaultSchemaTarget = defaultSchemaTarget; + } + + public List getSchemas() { + return schemas; + } + + public void setSchemas(List schemas) { + this.schemas = schemas; + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java index 3058f81999..206fd21356 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java @@ -32,6 +32,8 @@ public class StepExecutionResource extends RepresentationModel { } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java index 969223876f..690cf50ba9 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java @@ -23,9 +23,11 @@ import java.util.Map; import org.springframework.batch.core.JobExecution; + import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; @@ -38,6 +40,7 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class TaskExecutionResource extends RepresentationModel { @@ -119,6 +122,8 @@ public class TaskExecutionResource extends RepresentationModel(); } @@ -142,6 +147,8 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { this.endTime = taskJobExecutionRel.getTaskExecution().getEndTime(); this.errorMessage = taskJobExecutionRel.getTaskExecution().getErrorMessage(); this.externalExecutionId = taskJobExecutionRel.getTaskExecution().getExternalExecutionId(); + this.schemaTarget = taskJobExecutionRel.getTaskExecution().getSchemaTarget(); + this.platformName = taskJobExecutionRel.getTaskExecution().getPlatformName(); if (taskJobExecutionRel.getJobExecutionIds() == null) { 
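Together the two new resources describe the server's schema layout: each target pairs a boot schema version with its task and batch table prefixes, and the list resource names the default. A sketch of assembling them (the names, prefixes, and enum constants here are invented illustrations, not SCDF defaults):

import java.util.Arrays;
import org.springframework.cloud.dataflow.rest.resource.SchemaVersionTargetResource;
import org.springframework.cloud.dataflow.rest.resource.SchemaVersionTargetsResource;
import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;

public class SchemaTargetsSketch {
    public static void main(String[] args) {
        SchemaVersionTargetResource boot2 = new SchemaVersionTargetResource(
                "boot2", AppBootSchemaVersion.BOOT2, "TASK_", "BATCH_", null);
        SchemaVersionTargetResource boot3 = new SchemaVersionTargetResource(
                "boot3", AppBootSchemaVersion.BOOT3, "BOOT3_TASK_", "BOOT3_BATCH_", null);
        SchemaVersionTargetsResource targets =
                new SchemaVersionTargetsResource("boot2", Arrays.asList(boot2, boot3));
        targets.getSchemas().forEach(t ->
                System.out.println(t.getName() + " uses prefix " + t.getTaskPrefix()));
    }
}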
this.jobExecutionIds = Collections.emptyList(); } @@ -167,12 +174,14 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { * {@link TaskExecution}. * * @param taskExecution contains the {@link TaskExecution} + * @param composedTaskJobExecution the optional composed task execution. */ - public TaskExecutionResource(TaskExecution taskExecution, TaskJobExecution composedTaskJobExecution) { + public TaskExecutionResource(AggregateTaskExecution taskExecution, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); this.executionId = taskExecution.getExecutionId(); this.exitCode = taskExecution.getExitCode(); this.taskName = taskExecution.getTaskName(); + this.schemaTarget = taskExecution.getSchemaTarget(); this.exitMessage = taskExecution.getExitMessage(); this.arguments = Collections.unmodifiableList(taskExecution.getArguments()); this.startTime = taskExecution.getStartTime(); @@ -190,13 +199,15 @@ public TaskExecutionResource(TaskExecution taskExecution, TaskJobExecution compo * * @param taskExecution contains the {@link TaskExecution} * @param taskManifest contains the {@link TaskManifest} + * @param composedTaskJobExecution the optional composed task execution. */ - public TaskExecutionResource(TaskExecution taskExecution, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { + public TaskExecutionResource(AggregateTaskExecution taskExecution, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); Assert.notNull(taskManifest, "taskManifest must not be null"); this.executionId = taskExecution.getExecutionId(); this.exitCode = taskExecution.getExitCode(); this.taskName = taskExecution.getTaskName(); + this.schemaTarget = taskExecution.getSchemaTarget(); this.exitMessage = taskExecution.getExitMessage(); this.arguments = Collections.unmodifiableList(taskExecution.getArguments()); this.startTime = taskExecution.getStartTime(); @@ -279,6 +290,14 @@ public void setPlatformName(String platformName) { this.platformName = platformName; } + public String getSchemaTarget() { + return schemaTarget; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } + public void setTaskExecutionStatus(String taskExecutionStatus) { this.taskExecutionStatus = taskExecutionStatus; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionThinResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionThinResource.java new file mode 100644 index 0000000000..86df34d9c3 --- /dev/null +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionThinResource.java @@ -0,0 +1,220 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.rest.resource; + +import java.util.Date; + +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.hateoas.PagedModel; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.util.StringUtils; + + +/** + * This resource is a match for AggregateTaskExecution and should satisfy UI paging. + * @author Corneil du Plessis + */ +public class TaskExecutionThinResource extends RepresentationModel { + /** + * The unique id associated with the task execution. + */ + private long executionId; + + /** + * The parent task execution id. + */ + private Long parentExecutionId; + + /** + * The recorded exit code for the task. + */ + private Integer exitCode; + + /** + * User defined name for the task. + */ + private String taskName; + + /** + * Time of when the task was started. + */ + private Date startTime; + + /** + * Timestamp of when the task was completed/terminated. + */ + private Date endTime; + + /** + * Message returned from the task or stacktrace. + */ + private String exitMessage; + + private String externalExecutionId; + + + private String errorMessage; + + private String taskExecutionStatus; + + private String composedTaskJobExecutionStatus; + + /** + * @since 2.11.0 + */ + + private String schemaTarget; + + + public TaskExecutionThinResource() { + } + + public TaskExecutionThinResource(AggregateTaskExecution aggregateTaskExecution) { + this.executionId = aggregateTaskExecution.getExecutionId(); + this.schemaTarget = aggregateTaskExecution.getSchemaTarget(); + this.taskName = aggregateTaskExecution.getTaskName(); + this.externalExecutionId = aggregateTaskExecution.getExternalExecutionId(); + this.parentExecutionId =aggregateTaskExecution.getParentExecutionId(); + this.startTime = aggregateTaskExecution.getStartTime(); + this.endTime = aggregateTaskExecution.getEndTime(); + this.exitCode = aggregateTaskExecution.getExitCode(); + this.exitMessage = aggregateTaskExecution.getExitMessage(); + this.errorMessage = aggregateTaskExecution.getErrorMessage(); + this.composedTaskJobExecutionStatus = aggregateTaskExecution.getCtrTaskStatus(); + } + + public long getExecutionId() { + return executionId; + } + + public void setExecutionId(long executionId) { + this.executionId = executionId; + } + + public Long getParentExecutionId() { + return parentExecutionId; + } + + public void setParentExecutionId(Long parentExecutionId) { + this.parentExecutionId = parentExecutionId; + } + + public Integer getExitCode() { + return exitCode; + } + + public void setExitCode(Integer exitCode) { + this.exitCode = exitCode; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getExitMessage() { + return exitMessage; + } + + public void setExitMessage(String exitMessage) { + this.exitMessage = exitMessage; + } + + public String getExternalExecutionId() { + return externalExecutionId; + } + + public void setExternalExecutionId(String externalExecutionId) { + this.externalExecutionId = externalExecutionId; + } + + public String getErrorMessage() { + return errorMessage; + } + 
+ public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + public String getSchemaTarget() { + return schemaTarget; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } + + public void setTaskExecutionStatus(String taskExecutionStatus) { + this.taskExecutionStatus = taskExecutionStatus; + } + + /** + * Returns the calculated status of this {@link TaskExecution}. + * + * If {@link #startTime} is + * null, the {@link TaskExecution} is considered to be not running (never executed). + * + * If {@link #endTime} is + * null, the {@link TaskExecution} is considered to be still running: + * {@link TaskExecutionStatus#RUNNING}. If the {@link #endTime} is defined and the + * {@link #exitCode} is non-zero, a status of {@link TaskExecutionStatus#ERROR} is assumed; + * if {@link #exitCode} is zero, {@link TaskExecutionStatus#COMPLETE} is returned. + * + * @return TaskExecutionStatus, never null + */ + public TaskExecutionStatus getTaskExecutionStatus() { + if (StringUtils.hasText(this.taskExecutionStatus)) { + return TaskExecutionStatus.valueOf(this.taskExecutionStatus); + } + if (this.startTime == null) { + return TaskExecutionStatus.UNKNOWN; + } + if (this.endTime == null) { + return TaskExecutionStatus.RUNNING; + } + if (this.composedTaskJobExecutionStatus != null) { + return (this.composedTaskJobExecutionStatus.equals("ABANDONED") || + this.composedTaskJobExecutionStatus.equals("FAILED") || + this.composedTaskJobExecutionStatus.equals("STOPPED")) ? + TaskExecutionStatus.ERROR : TaskExecutionStatus.COMPLETE; + } + return (this.exitCode == null) ? TaskExecutionStatus.RUNNING : + ((this.exitCode == 0) ? TaskExecutionStatus.COMPLETE : TaskExecutionStatus.ERROR); + } + + public static class Page extends PagedModel<TaskExecutionThinResource> { + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java index 9c1133e982..e6251b35d6 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java @@ -18,10 +18,14 @@ import org.springframework.hateoas.RepresentationModel; +import java.util.HashMap; +import java.util.Map; + /** * Provides meta-information about the Spring Cloud Data Flow server. * * @author Gunnar Hillert + * @author Felipe Gutierrez */ public class AboutResource extends RepresentationModel<AboutResource> { @@ -35,6 +39,8 @@ public class AboutResource extends RepresentationModel<AboutResource> { private MonitoringDashboardInfo monitoringDashboardInfo = new MonitoringDashboardInfo(); + private Map gitAndBuildInfo = new HashMap<>(); + /** * Default constructor for serialization frameworks. 
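The status derivation above is worth tracing end to end, since the thin resource has no explicit status field of its own. A small walkthrough of the branches, using only the class as defined in this change:

import java.util.Date;
import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource;

public class ThinStatusSketch {
    public static void main(String[] args) {
        TaskExecutionThinResource thin = new TaskExecutionThinResource();
        System.out.println(thin.getTaskExecutionStatus()); // UNKNOWN: no start time
        thin.setStartTime(new Date());
        System.out.println(thin.getTaskExecutionStatus()); // RUNNING: no end time yet
        thin.setEndTime(new Date());
        thin.setExitCode(0);
        System.out.println(thin.getTaskExecutionStatus()); // COMPLETE: exit code 0
        thin.setExitCode(1);
        System.out.println(thin.getTaskExecutionStatus()); // ERROR: non-zero exit code
    }
}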
*/ @@ -80,4 +86,12 @@ public MonitoringDashboardInfo getMonitoringDashboardInfo() { public void setMonitoringDashboardInfo(MonitoringDashboardInfo monitoringDashboardInfo) { this.monitoringDashboardInfo = monitoringDashboardInfo; } + + public Map getGitAndBuildInfo() { + return gitAndBuildInfo; + } + + public void setGitAndBuildInfo(Map gitAndBuildInfo) { + this.gitAndBuildInfo = gitAndBuildInfo; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java index d6b45f6d1d..ac257085ed 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java @@ -54,7 +54,6 @@ public JobParameter deserialize(JsonParser jsonParser, DeserializationContext de if (!type.isEmpty() && !type.equalsIgnoreCase("STRING")) { if ("DATE".equalsIgnoreCase(type)) { - // TODO: when upgraded to Java8 use java DateTime jobParameter = new JobParameter(DateTime.parse(value).toDate(), identifying); } else if ("DOUBLE".equalsIgnoreCase(type)) { @@ -72,8 +71,8 @@ else if ("LONG".equalsIgnoreCase(type)) { } if (logger.isDebugEnabled()) { - logger.debug(String.format("jobParameter - value: %s (type: %s, isIdentifying: %s)", - jobParameter.getValue(), jobParameter.getType().name(), jobParameter.isIdentifying())); + logger.debug("jobParameter - value: {} (type: {}, isIdentifying: {})", + jobParameter.getValue(), jobParameter.getType().name(), jobParameter.isIdentifying()); } return jobParameter; diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java index a320fc7b06..54e0dfb666 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java @@ -17,18 +17,27 @@ package org.springframework.cloud.dataflow.rest.util; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; import org.springframework.cloud.dataflow.core.DefinitionUtils; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.core.dsl.graph.Graph; +import org.springframework.http.HttpHeaders; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -38,15 +47,24 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan 
+ * @author Corneil du Plessis */ public class ArgumentSanitizer { + private final static Logger logger = LoggerFactory.getLogger(ArgumentSanitizer.class); - private static final String[] REGEX_PARTS = { "*", "$", "^", "+" }; + private static final String[] REGEX_PARTS = {"*", "$", "^", "+"}; private static final String REDACTION_STRING = "******"; - private static final String[] KEYS_TO_SANITIZE = { "username", "password", "secret", "key", "token", ".*credentials.*", - "vcap_services", "url" }; + private static final String[] KEYS_TO_SANITIZE = {"username", "password", "secret", "key", "token", ".*credentials.*", + "vcap_services", "url"}; + + private final static TypeReference<Map<String, Object>> mapTypeReference = new TypeReference<Map<String, Object>>() { + }; + + private final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); + + private final ObjectMapper jsonMapper = new ObjectMapper(); private Pattern[] keysToSanitize; @@ -80,6 +98,10 @@ private boolean isRegex(String value) { * @return the argument with a potentially sanitized value */ public String sanitize(String argument) { + // Oracle handles an empty string as a null. + if (argument == null) { + return ""; + } int indexOfFirstEqual = argument.indexOf("="); if (indexOfFirstEqual == -1) { return argument; @@ -95,7 +117,7 @@ public String sanitize(String argument) { /** * Replaces a potential secure value with "******". * - * @param key to check for sensitive words. + * @param key to check for sensitive words. * @param value the argument to cleanse. * @return the argument with a potentially sanitized value */ @@ -118,13 +140,12 @@ public String sanitize(String key, String value) { * @return the sanitized job parameters */ public JobParameters sanitizeJobParameters(JobParameters jobParameters) { - Map<String, JobParameter> newJobParameters = new HashMap<>(); - jobParameters.getParameters().forEach( (key, jobParameter) -> { + Map<String, JobParameter> newJobParameters = new HashMap<>(); + jobParameters.getParameters().forEach((key, jobParameter) -> { String updatedKey = !jobParameter.isIdentifying() ? "-" + key : key; if (jobParameter.getType().equals(JobParameter.ParameterType.STRING)) { newJobParameters.put(updatedKey, new JobParameter(this.sanitize(key, jobParameter.toString()))); - } - else { + } else { newJobParameters.put(updatedKey, jobParameter); } }); @@ -138,7 +159,7 @@ public JobParameters sanitizeJobParameters(JobParameters jobParameters) { * @return Task definition text that has sensitive data redacted. */ public String sanitizeTaskDsl(TaskDefinition taskDefinition) { - if(StringUtils.isEmpty(taskDefinition.getDslText())) { + if (StringUtils.isEmpty(taskDefinition.getDslText())) { return taskDefinition.getDslText(); } TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); @@ -157,13 +178,14 @@ public String sanitizeTaskDsl(TaskDefinition taskDefinition) { /** * For all sensitive properties (e.g. 
key names containing words like password, secret, * key, token) replace the value with '*****' string + * * @param properties to be sanitized * @return sanitized properties */ public Map sanitizeProperties(Map properties) { if (!CollectionUtils.isEmpty(properties)) { final Map sanitizedProperties = new LinkedHashMap<>(properties.size()); - for (Map.Entry property : properties.entrySet()) { + for (Map.Entry property : properties.entrySet()) { sanitizedProperties.put(property.getKey(), this.sanitize(property.getKey(), property.getValue())); } return sanitizedProperties; @@ -174,6 +196,7 @@ public Map sanitizeProperties(Map properties) { /** * For all sensitive arguments (e.g. key names containing words like password, secret, * key, token) replace the value with '*****' string + * * @param arguments to be sanitized * @return sanitized arguments */ @@ -187,4 +210,95 @@ public List sanitizeArguments(List arguments) { } return arguments; } + + public HttpHeaders sanitizeHeaders(HttpHeaders headers) { + HttpHeaders result = new HttpHeaders(); + for (Map.Entry> entry : headers.entrySet()) { + List values = entry.getValue(); + for (String value : values) { + result.add(entry.getKey(), sanitize(entry.getKey(), value)); + } + } + return result; + } + + /** + * Will replace sensitive string value in the Map with '*****' + * + * @param input to be sanitized + * @return the sanitized map. + */ + public Map sanitizeMap(Map input) { + Map result = new HashMap<>(); + for (Map.Entry entry : input.entrySet()) { + if (entry.getValue() instanceof String) { + result.put(entry.getKey(), sanitize(entry.getKey(), (String) entry.getValue())); + } else if (entry.getValue() instanceof Map) { + Map map = (Map) entry.getValue(); + result.put(entry.getKey(), sanitizeMap(map)); + } else { + result.put(entry.getKey(), entry.getValue()); + } + } + return result; + } + + /** + * Will replace the sensitive string fields with '*****' + * + * @param input to be sanitized + * @return The sanitized JSON string + * @throws JsonProcessingException from mapper. + */ + public String sanitizeJsonString(String input) throws JsonProcessingException { + if (input == null) { + return null; + } + Map data = jsonMapper.readValue(input, mapTypeReference); + return jsonMapper.writeValueAsString(sanitizeMap(data)); + } + + /** + * Will replace the sensitive string fields with '*****' + * + * @param input to be sanitized + * @return The sanitized YAML string + * @throws JsonProcessingException from mapper + */ + public String sanitizeYamlString(String input) throws JsonProcessingException { + if (input == null) { + return null; + } + Map data = yamlMapper.readValue(input, mapTypeReference); + return yamlMapper.writeValueAsString(sanitizeMap(data)); + } + + /** + * Will determine the type of data and treat as JSON or YAML to sanitize sensitive values. 
+ * + * @param input to be sanitized + * @return the sanitized string + */ + public String sanitizeJsonOrYamlString(String input) { + if (input == null) { + return null; + } + try { // Try parsing as JSON + return sanitizeJsonString(input); + } catch (Throwable x) { + logger.trace("Cannot parse as JSON:" + x); + } + try { + return sanitizeYamlString(input); + } catch (Throwable x) { + logger.trace("Cannot parse as YAML:" + x); + } + if (input.contains("\n")) { + return StringUtils.collectionToDelimitedString(sanitizeArguments(Arrays.asList(StringUtils.split(input, "\n"))), "\n"); + } + if (input.contains("--")) { + return StringUtils.collectionToDelimitedString(sanitizeArguments(Arrays.asList(StringUtils.split(input, "--"))), "--"); + } + return sanitize(input); + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java index e9ba376a78..78af4778bd 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,11 +33,14 @@ import java.util.stream.Collectors; import org.apache.commons.io.FilenameUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.config.YamlPropertiesFactoryBean; import org.springframework.core.io.FileSystemResource; import org.springframework.util.StringUtils; + /** * Provides utility methods for formatting and parsing deployment properties. * @@ -47,9 +50,10 @@ * @author Christian Tzolov * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Glenn Renfro */ public final class DeploymentPropertiesUtils { - + private static final Logger logger = LoggerFactory.getLogger(DeploymentPropertiesUtils.class); /** * Pattern used for parsing a String of command-line arguments. */ @@ -85,6 +89,7 @@ public static Map parse(String s) { for (String pair : pairs) { addKeyValuePairAsProperty(pair, deploymentProperties); } + logger.debug("parse:{}={}", s, deploymentProperties); return deploymentProperties; } @@ -108,23 +113,25 @@ public static List parseParamList(String s, String delimiter) { // get raw candidates as simple comma split String[] candidates = StringUtils.delimitedListToStringArray(s, delimiter); for (int i = 0; i < candidates.length; i++) { - if (i > 0 && !candidates[i].contains("=") || (i > 0 && candidates[i].contains("=") && !startsWithDeploymentPropertyPrefix(candidates[i]))) { - // we don't have '=' so this has to be latter parts of - // a comma delimited value, append it to previously added - // key/value pair. - // we skip first as we would not have anything to append to. this - // would happen if dep prop string is malformed and first given - // key/value pair is not actually a key/value. 
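The new map, JSON, and YAML entry points reuse the same key matching as the original argument sanitizer, so one redaction policy covers flat arguments and structured payloads alike. A quick sketch of both (the property names and values here are invented):

import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer;

public class SanitizerSketch {
    public static void main(String[] args) {
        ArgumentSanitizer sanitizer = new ArgumentSanitizer();
        // Flat argument: the key matches "password", so the value becomes ******
        System.out.println(sanitizer.sanitize("--spring.datasource.password=supersecret"));
        // Structured payload: "username" and "token" are redacted recursively,
        // whether the string parses as JSON or as YAML.
        System.out.println(sanitizer.sanitizeJsonOrYamlString(
                "{\"username\":\"scdf\",\"nested\":{\"token\":\"abc123\"}}"));
    }
}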
- pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidates[i]); - } - else { - // we have a key/value pair having '=', or malformed first pair - if (!startsWithDeploymentPropertyPrefix(candidates[i])) { - throw new IllegalArgumentException( - "Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.'" + - " allowed."); + String candidate = candidates[i]; + if(StringUtils.hasText(candidate)) { + if (i > 0 && !candidate.contains("=") || (i > 0 && candidate.contains("=") && !startsWithDeploymentPropertyPrefix(candidate))) { + // we don't have '=' so this has to be latter parts of + // a comma delimited value, append it to previously added + // key/value pair. + // we skip first as we would not have anything to append to. this + // would happen if dep prop string is malformed and first given + // key/value pair is not actually a key/value. + pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidate); + } else { + // we have a key/value pair having '=', or malformed first pair + if (!startsWithDeploymentPropertyPrefix(candidate)) { + throw new IllegalArgumentException( + "Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.'" + + " allowed. Not " + candidate); + } + pairs.add(candidate); } - pairs.add(candidates[i]); } } @@ -154,7 +161,9 @@ public static List parseArgumentList(String s, String delimiter) { for (int i = 0; i < candidates.length; i++) { int elementsInQuotesIndex = findEndToken(candidates, i) +1; if (elementsInQuotesIndex > -1) { - pairs.add(candidates[i]); + if(!candidates[i].equals("")) { + pairs.add(candidates[i]); + } i++; for (; i < elementsInQuotesIndex; i++) { pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidates[i]); @@ -174,10 +183,22 @@ public static List parseArgumentList(String s, String delimiter) { } else { // we have a key/value pair having '=', or malformed first pair - pairs.add(candidates[i]); + if(!candidates[i].equals("")) { + int endToken = findEndToken(candidates, i); + if(endToken > -1) { + pairs.add(candidates[i] + " " + candidates[endToken]); + i = endToken; + } + else { + pairs.add(candidates[i]); + } + } } } - } + for(int i = 0; i < pairs.size(); i++) { + pairs.set(i, StringUtils.trimTrailingWhitespace(pairs.get(i))); + } + } return pairs; } @@ -313,9 +334,9 @@ public static Map extractAndQualifyDeployerProperties(Map kv.getKey().startsWith(wildcardPrefix) || kv.getKey().startsWith(appPrefix)) .collect(Collectors.toMap(kv -> kv.getKey().startsWith(wildcardPrefix) ? "spring.cloud.deployer." + kv.getKey().substring(wildcardLength) - : "spring.cloud.deployer." + kv.getKey().substring(appLength), kv -> kv.getValue(), + : "spring.cloud.deployer." + kv.getKey().substring(appLength), Entry::getValue, (fromWildcard, fromApp) -> fromApp)); - + logger.debug("extractAndQualifyDeployerProperties:{}", result); return result; } @@ -340,15 +361,16 @@ public static Map qualifyDeployerProperties(Map .filter(kv -> kv.getKey().startsWith(wildcardPrefix) || kv.getKey().startsWith(appPrefix)) .collect(Collectors.toMap(kv -> kv.getKey().startsWith(wildcardPrefix) ? "spring.cloud.deployer." + kv.getKey().substring(wildcardLength) - : "spring.cloud.deployer." + kv.getKey().substring(appLength), kv -> kv.getValue(), + : "spring.cloud.deployer." 
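The empty-candidate guard changes what reaches the prefix check: blank fragments produced by stray delimiters are now skipped instead of tripping the IllegalArgumentException. A sketch of the splitting behaviour, including how a value containing the delimiter is folded back into its pair (property keys invented):

import java.util.List;
import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils;

public class ParseParamListSketch {
    public static void main(String[] args) {
        // The middle fragment "b" has no '=', so it is appended to the previous
        // pair, yielding ["app.reader.options=a,b", "deployer.reader.memory=1g"].
        List<String> pairs = DeploymentPropertiesUtils.parseParamList(
                "app.reader.options=a,b,deployer.reader.memory=1g", ",");
        pairs.forEach(System.out::println);
    }
}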
+ kv.getKey().substring(appLength), Entry::getValue, (fromWildcard, fromApp) -> fromApp)); Map resultApp = new TreeMap<>(input).entrySet().stream() .filter(kv -> !kv.getKey().startsWith(wildcardPrefix) && !kv.getKey().startsWith(appPrefix)) - .collect(Collectors.toMap(kv -> kv.getKey(), kv -> kv.getValue(), + .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (fromWildcard, fromApp) -> fromApp)); resultDeployer.putAll(resultApp); + logger.debug("qualifyDeployerProperties:{}", resultDeployer); return resultDeployer; } @@ -419,8 +441,8 @@ public static List removeQuoting(List params) { } start = regexMatcher.start(); } - if (param != null && param.length() > 0) { - String p = removeQuoting(param.substring(start, param.length()).trim()); + if (param != null && !param.isEmpty()) { + String p = removeQuoting(param.substring(start).trim()); if (StringUtils.hasText(p)) { paramsToUse.add(p); } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java index 9343739c88..eddeabe1ba 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java @@ -15,18 +15,20 @@ */ package org.springframework.cloud.dataflow.rest.job.support; -import org.junit.Test; + +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * @author Gunnar Hillert + * @author Corneil du Plessis * @since 1.0 */ public class JobUtilsTests { diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java index ab155b01af..00eb6e6aa3 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java @@ -20,13 +20,15 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; -import org.junit.Test; +import org.junit.jupiter.api.Test; + + +import static org.junit.jupiter.api.Assertions.*; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; /** * @author Gunnar Hillert + * @author Corneil du Plessis */ public class DeploymentStateResourceTests { @@ -40,9 +42,9 @@ public void testSerializationOfSingleStepExecution() throws JsonProcessingExcept final DocumentContext documentContext = JsonPath.parse(result); - assertThat(documentContext.read("$.key"), is("deployed")); - assertThat(documentContext.read("$.displayName"), 
is("Deployed")); - assertThat(documentContext.read("$.description"), is("The stream has been successfully deployed")); + assertEquals("deployed", documentContext.read("$.key")); + assertEquals("Deployed", documentContext.read("$.displayName")); + assertEquals("The stream has been successfully deployed", documentContext.read("$.description")); } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java index 65c5b15963..11fc374074 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java @@ -22,16 +22,19 @@ import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; -import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.rest.util.CheckableResource; import org.springframework.cloud.dataflow.rest.util.HttpClientConfigurer; import org.springframework.cloud.dataflow.rest.util.ResourceBasedAuthorizationInterceptor; import org.springframework.core.io.ByteArrayResource; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; + /** * @author Mike Heath + * @author Corneil du Plessis */ public class HttpClientTest { @@ -57,48 +60,52 @@ public void check() throws IOException { } } - @Test(expected = Passed.class) + @Test public void resourceBasedAuthorizationHeader() throws Exception { final String credentials = "Super Secret Credentials"; final CheckableResource resource = new ByteArrayCheckableResource(credentials.getBytes(), null); final URI targetHost = new URI("http://test.com"); - try (final CloseableHttpClient client = HttpClientConfigurer.create(targetHost) - .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) - .addInterceptor((request, context) -> { - final String authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); - Assertions.assertThat(authorization).isEqualTo(credentials); - - // Throw an exception to short-circuit making an HTTP request - throw new Passed(); - }) - .buildHttpClient()) { - client.execute(new HttpGet(targetHost)); - } + assertThatExceptionOfType(Passed.class).isThrownBy(() -> { + try (final CloseableHttpClient client = HttpClientConfigurer.create(targetHost) + .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) + .addInterceptor((request, context) -> { + final String authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); + assertThat(authorization).isEqualTo(credentials); + + // Throw an exception to short-circuit making an HTTP request + throw new Passed(); + }) + .buildHttpClient()) { + client.execute(new HttpGet(targetHost)); + } + }); } static final class Passed extends RuntimeException { } - @Test(expected = TestException.class) + @Test public void resourceBasedAuthorizationHeaderResourceCheck() throws Exception { final String credentials = "Super Secret Credentials"; final CheckableResource resource = new ByteArrayCheckableResource(credentials.getBytes(), new TestException()); final URI targetHost = new URI("http://test.com"); - try (final 
CloseableHttpClient client = HttpClientConfigurer.create(targetHost) - .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) - .addInterceptor((request, context) -> { - final String authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); - Assertions.assertThat(authorization).isEqualTo(credentials); - - // Throw an exception to short-circuit making an HTTP request - throw new Passed(); - }) - .buildHttpClient()) { - client.execute(new HttpGet(targetHost)); - } + assertThatExceptionOfType(TestException.class).isThrownBy(() -> { + try (final CloseableHttpClient client = HttpClientConfigurer.create(targetHost) + .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) + .addInterceptor((request, context) -> { + final String authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); + assertThat(authorization).isEqualTo(credentials); + + // Throw an exception to short-circuit making an HTTP request + throw new Passed(); + }) + .buildHttpClient()) { + client.execute(new HttpGet(targetHost)); + } + }); } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java index e7c25cb710..468e71398b 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,20 +20,23 @@ import java.util.Collections; import java.util.Date; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; -import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.core.io.UrlResource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.assertj.core.api.Assertions.assertThat; /** * Provides tests for the {@link TaskExecutionResourceTests} class. 
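The same conversion recipe applies to any remaining @Test(expected = ...) usages: JUnit 5 drops the attribute, and the AssertJ wrapper also narrows the assertion to the exact statements that should throw. The shape in isolation:

import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;

public class ExpectedExceptionSketch {
    static void boom() {
        throw new IllegalStateException("boom");
    }

    public static void main(String[] args) {
        // Equivalent of the JUnit 4 @Test(expected = IllegalStateException.class)
        assertThatExceptionOfType(IllegalStateException.class)
                .isThrownBy(ExpectedExceptionSketch::boom)
                .withMessage("boom");
        System.out.println("assertion passed");
    }
}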
@@ -41,106 +44,131 @@ * @author Gunnar Hillert * @author Ilayaperumal Gopinathan * @author Glenn Renfro + * @author Corneil du Plessis */ public class TaskExecutionResourceTests { @Test - public void testTaskExecutionStatusWithNoTaskExecutionSet() { + public void testTaskExecutionStatusWithNoTaskExecutionSet() { final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(); - assertEquals(TaskExecutionStatus.UNKNOWN, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.UNKNOWN); } @Test - public void testTaskExecutionStatusWithNoStartTime() { - final TaskExecution taskExecution = new TaskExecution(); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.UNKNOWN, taskExecutionResource.getTaskExecutionStatus()); + public void testTaskExecutionStatusWithNoStartTime() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); + taskExecution.setSchemaTarget(target.getName()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.UNKNOWN); + } } @Test - public void testTaskExecutionStatusWithRunningTaskExecution() { - final TaskExecution taskExecution = new TaskExecution(); - taskExecution.setStartTime(new Date()); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.RUNNING, taskExecutionResource.getTaskExecutionStatus()); - assertNull(taskExecutionResource.getExitCode()); + public void testTaskExecutionStatusWithRunningTaskExecution() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); + taskExecution.setSchemaTarget(target.getName()); + taskExecution.setStartTime(new Date()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.RUNNING); + assertThat(taskExecutionResource.getExitCode()).isNull(); + } } @Test - public void testTaskExecutionStatusWithSuccessfulTaskExecution() { - final TaskExecution taskExecution = getDefaultTaskExecution(); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + public void testTaskExecutionStatusWithSuccessfulTaskExecution() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = getDefaultTaskExecution(target.getName()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + } } @Test - public void testCTRExecutionStatusWithSuccessfulJobExecution() { - final TaskExecution taskExecution = getDefaultTaskExecution(); - JobExecution jobExecution = new 
JobExecution(1L); - jobExecution.setExitStatus(ExitStatus.COMPLETED); - TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + public void testCTRExecutionStatusWithSuccessfulJobExecution() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = getDefaultTaskExecution(target.getName()); + JobExecution jobExecution = new JobExecution(1L); + jobExecution.setExitStatus(ExitStatus.COMPLETED); + TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true, target.getName()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + + } } @Test - public void testCTRExecutionStatusWithFailedJobExecution() { - final TaskExecution taskExecution = new TaskExecution(); + public void testCTRExecutionStatusWithFailedJobExecution() { + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); taskExecution.setStartTime(new Date()); taskExecution.setEndTime(new Date()); taskExecution.setExitCode(0); JobExecution jobExecution = new JobExecution(1L); jobExecution.setExitStatus(ExitStatus.FAILED); - TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true); + final String defaultSchemaTarget = SchemaVersionTarget.defaultTarget().getName(); + TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true, defaultSchemaTarget); final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); } @Test - public void testTaskExecutionStatusWithFailedTaskExecution() { - final TaskExecution taskExecution = new TaskExecution(); + public void testTaskExecutionStatusWithFailedTaskExecution() { + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); taskExecution.setStartTime(new Date()); taskExecution.setEndTime(new Date()); taskExecution.setExitCode(123); final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); } @Test - public void testTaskExecutionForTaskExecutionRel() throws Exception{ - final TaskExecution taskExecution = getDefaultTaskExecution(); - TaskManifest taskManifest = new TaskManifest(); - taskManifest.setPlatformName("testplatform"); - taskManifest.setTaskDeploymentRequest(new AppDeploymentRequest(new AppDefinition("testapp", Collections.emptyMap()), new UrlResource("http://foo"))); - TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), taskManifest, null); - TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - 
assertEquals("testplatform", taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, null); - taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); - JobExecution jobExecution = new JobExecution(1L, null, "foo"); - jobExecution.setExitStatus(ExitStatus.FAILED); - - TaskJobExecution ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); - taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); - jobExecution.setExitStatus(ExitStatus.COMPLETED); - ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); - taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + public void testTaskExecutionForTaskExecutionRel() throws Exception { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + + final AggregateTaskExecution taskExecution = getDefaultTaskExecution(target.getName()); + TaskManifest taskManifest = new TaskManifest(); + taskManifest.setPlatformName("testplatform"); + taskManifest.setTaskDeploymentRequest(new AppDeploymentRequest(new AppDefinition("testapp", Collections.emptyMap()), new UrlResource("http://foo"))); + TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), taskManifest, null); + TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isEqualTo("testplatform"); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, null); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + JobExecution jobExecution = new JobExecution(1L, null, "foo"); + jobExecution.setExitStatus(ExitStatus.FAILED); + + TaskJobExecution ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true, target.getName()); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); + jobExecution.setExitStatus(ExitStatus.COMPLETED); + ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true, target.getName()); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), 
null, ctrTaskJobExecution); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + } } - private TaskExecution getDefaultTaskExecution() { - final TaskExecution taskExecution = new TaskExecution(); + private AggregateTaskExecution getDefaultTaskExecution(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); taskExecution.setStartTime(new Date()); taskExecution.setEndTime(new Date()); taskExecution.setExitCode(0); + taskExecution.setSchemaTarget(schemaTarget); return taskExecution; } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java index 7cab42c7bd..928867702c 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java @@ -16,23 +16,29 @@ package org.springframework.cloud.dataflow.rest.support.jackson; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.JsonPath; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; import org.springframework.batch.item.ExecutionContext; -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.not; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; /** * Tests that the {@link ExecutionContextJacksonMixIn} works as expected. * * @author Gunnar Hillert + * @author Corneil du Plessis */ public class StepExecutionJacksonMixInTests { @@ -42,15 +48,20 @@ public class StepExecutionJacksonMixInTests { * * @throws JsonProcessingException if a Json generation error occurs. 
*/ - @Test(expected = JsonMappingException.class) + @Test public void testSerializationOfSingleStepExecutionWithoutMixin() throws JsonProcessingException { - - final ObjectMapper objectMapper = new ObjectMapper(); - - final StepExecution stepExecution = getStepExecution(); - final String result = objectMapper.writeValueAsString(stepExecution); - - assertThat(result, containsString("\"executionContext\":{\"dirty\":true,\"empty\":false}")); + assertThatExceptionOfType(JsonMappingException.class).isThrownBy(() -> { + final ObjectMapper objectMapper = new ObjectMapper(); + final StepExecution stepExecution = getStepExecution(); + final String result = objectMapper.writeValueAsString(stepExecution); + DocumentContext parsed = JsonPath.parse(result); + Object dirty = parsed.read("$['executionContext']['dirty']"); + assertThat(dirty).isExactlyInstanceOf(Boolean.class); + assertThat((Boolean) dirty).isTrue(); + Object empty = parsed.read("$['executionContext']['empty']"); + assertThat(empty).isExactlyInstanceOf(Boolean.class); + assertThat((Boolean) empty).isFalse(); + }); } /** @@ -70,15 +81,24 @@ public void testSerializationOfSingleStepExecution() throws JsonProcessingExcept final StepExecution stepExecution = getStepExecution(); final String result = objectMapper.writeValueAsString(stepExecution); - assertThat(result, not(containsString("\"executionContext\":{\"dirty\":true,\"empty\":false}"))); - assertThat(result, containsString("\"executionContext\":{\"dirty\":true,\"empty\":false,\"values\":[{")); - - assertThat(result, containsString("{\"counter\":1234}")); - assertThat(result, containsString("{\"myDouble\":1.123456}")); - assertThat(result, containsString("{\"Josh\":4444444444}")); - assertThat(result, containsString("{\"awesomeString\":\"Yep\"}")); - assertThat(result, containsString("{\"hello\":\"world\"")); - assertThat(result, containsString("{\"counter2\":9999}")); + DocumentContext parsed = JsonPath.parse(result); + Object dirty = parsed.read("$['executionContext']['dirty']"); + assertThat(dirty).isExactlyInstanceOf(Boolean.class); + assertThat((Boolean) dirty).isTrue(); + Object empty = parsed.read("$['executionContext']['empty']"); + assertThat(empty).isExactlyInstanceOf(Boolean.class); + assertThat((Boolean) empty).isFalse(); + Object values = parsed.read("$['executionContext']['values']", List.class); + assertThat(values).isInstanceOf(List.class); + Map valueMap = ((List>) values).stream() + .flatMap(map -> map.entrySet().stream()) + .collect(Collectors.toMap(o -> o.getKey(), o -> o.getValue())); + assertThat(valueMap).containsEntry("counter", 1234); + assertThat(valueMap).containsEntry("myDouble", 1.123456); + assertThat(valueMap).containsEntry("Josh", 4444444444L); + assertThat(valueMap).containsEntry("awesomeString", "Yep"); + assertThat(valueMap).containsEntry("hello", "world"); + assertThat(valueMap).containsEntry("counter2", 9999); } private StepExecution getStepExecution() { diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java index 06fc3b4cc4..6e813805fd 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java @@ -1,5 +1,5 @@ /* - 
* Copyright 2016-2020 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,89 +25,86 @@ import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.util.FileCopyUtils; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.hasKey; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.entry; +import static org.assertj.core.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Tests for {@link DeploymentPropertiesUtils}. * * @author Janne Valkealahti * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Glenn Renfro + * @author Corneil du Plessis */ public class DeploymentPropertiesUtilsTests { private static void assertArrays(String[] left, String[] right) { ArrayList params = new ArrayList<>(Arrays.asList(left)); - assertThat(DeploymentPropertiesUtils.removeQuoting(params), containsInAnyOrder(right)); + assertThat(DeploymentPropertiesUtils.removeQuoting(params)).contains(right); } @Test public void testDeploymentPropertiesParsing() { Map props = DeploymentPropertiesUtils.parse("app.foo.bar=v, app.foo.wizz=v2 , deployer.foo" + ".pot=fern, app.other.key = value , deployer.other.cow = meww, scheduler.other.key = baz"); - assertThat(props, hasEntry("app.foo.bar", "v")); - assertThat(props, hasEntry("app.other.key", "value")); - assertThat(props, hasEntry("app.foo.wizz", "v2")); - assertThat(props, hasEntry("deployer.foo.pot", "fern")); - assertThat(props, hasEntry("deployer.other.cow", "meww")); - assertThat(props, hasEntry("scheduler.other.key", "baz")); + assertThat(props.entrySet()).contains(entry("app.foo.bar", "v")); + assertThat(props.entrySet()).contains(entry("app.other.key", "value")); + assertThat(props.entrySet()).contains(entry("app.foo.wizz", "v2")); + assertThat(props.entrySet()).contains(entry("deployer.foo.pot", "fern")); + assertThat(props.entrySet()).contains(entry("deployer.other.cow", "meww")); + assertThat(props.entrySet()).contains(entry("scheduler.other.key", "baz")); props = DeploymentPropertiesUtils.parse("app.f=v"); - assertThat(props, hasEntry("app.f", "v")); + assertThat(props.entrySet()).contains(entry("app.f", "v")); props = DeploymentPropertiesUtils.parse("app.foo1=bar1,app.foo2=bar2,app.foo3=bar3,xxx3"); - assertThat(props, hasEntry("app.foo1", "bar1")); - assertThat(props, hasEntry("app.foo2", "bar2")); - assertThat(props, hasEntry("app.foo3", "bar3,xxx3")); + assertThat(props.entrySet()).contains(entry("app.foo1", "bar1")); + assertThat(props.entrySet()).contains(entry("app.foo2", "bar2")); + assertThat(props.entrySet()).contains(entry("app.foo3", "bar3,xxx3")); props = DeploymentPropertiesUtils.parse("deployer.foo1 = bar1 , app.foo2= bar2, deployer.foo3 = bar3,xxx3"); - assertThat(props, hasEntry("deployer.foo1", "bar1")); - assertThat(props, hasEntry("app.foo2", "bar2")); - assertThat(props, hasEntry("deployer.foo3", "bar3,xxx3")); + assertThat(props.entrySet()).contains(entry("deployer.foo1", "bar1")); + 
assertThat(props.entrySet()).contains(entry("app.foo2", "bar2")); + assertThat(props.entrySet()).contains(entry("deployer.foo3", "bar3,xxx3")); props = DeploymentPropertiesUtils.parse("app.*.count=1"); - assertThat(props, hasEntry("app.*.count", "1")); + assertThat(props.entrySet()).contains(entry("app.*.count", "1")); props = DeploymentPropertiesUtils.parse("app.*.my-count=1"); - assertThat(props, hasEntry("app.*.my-count", "1")); + assertThat(props.entrySet()).contains(entry("app.*.my-count", "1")); props = DeploymentPropertiesUtils.parse("app.transform.producer.partitionKeyExpression=fakeExpression('xxx')"); - assertThat(props, hasEntry("app.transform.producer.partitionKeyExpression", "fakeExpression('xxx')")); + assertThat(props.entrySet()).contains(entry("app.transform.producer.partitionKeyExpression", "fakeExpression('xxx')")); try { DeploymentPropertiesUtils.parse("invalidkeyvalue"); fail("Illegal Argument Exception expected."); } catch (Exception e) { - assertTrue(e.getMessage().equals("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed.")); + assertThat(e.getMessage()).isEqualTo("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed. Not invalidkeyvalue"); } props = DeploymentPropertiesUtils.parse("deployer.foo=bar,invalidkeyvalue2"); - assertThat(props.size(), is(1)); - assertThat(props, hasEntry("deployer.foo", "bar,invalidkeyvalue2")); + assertThat(props.size()).isEqualTo(1); + assertThat(props.entrySet()).contains(entry("deployer.foo", "bar,invalidkeyvalue2")); props = DeploymentPropertiesUtils.parse("app.foo.bar1=jee1,jee2,jee3,deployer.foo.bar2=jee4,jee5,jee6"); - assertThat(props, hasEntry("app.foo.bar1", "jee1,jee2,jee3")); - assertThat(props, hasEntry("deployer.foo.bar2", "jee4,jee5,jee6")); + assertThat(props.entrySet()).contains(entry("app.foo.bar1", "jee1,jee2,jee3")); + assertThat(props.entrySet()).contains(entry("deployer.foo.bar2", "jee4,jee5,jee6")); props = DeploymentPropertiesUtils.parse("app.foo.bar1=xxx=1,app.foo.bar2=xxx=2"); - assertThat(props, hasEntry("app.foo.bar1", "xxx=1")); - assertThat(props, hasEntry("app.foo.bar2", "xxx=2")); + assertThat(props.entrySet()).contains(entry("app.foo.bar1", "xxx=1")); + assertThat(props.entrySet()).contains(entry("app.foo.bar2", "xxx=2")); props = DeploymentPropertiesUtils.parse("app.foo.bar1=xxx=1,test=value,app.foo.bar2=xxx=2"); - assertThat(props, hasEntry("app.foo.bar1", "xxx=1,test=value")); - assertThat(props, hasEntry("app.foo.bar2", "xxx=2")); + assertThat(props.entrySet()).contains(entry("app.foo.bar1", "xxx=1,test=value")); + assertThat(props.entrySet()).contains(entry("app.foo.bar2", "xxx=2")); } @@ -127,13 +124,18 @@ public void testDeploymentPropertiesParsing2() { fail("Illegal Argument Exception expected."); } catch (Exception e) { - assertTrue(e.getMessage().equals("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed.")); + assertThat(e.getMessage()).isEqualTo("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed. 
Not a=b"); } props = DeploymentPropertiesUtils.parseArgumentList("a=b c=d", " "); assertTrue(props.contains("c=d")); assertTrue(props.contains("a=b")); + props = DeploymentPropertiesUtils.parseArgumentList("a=b c=d ", " "); + + assertTrue(props.contains("a=b")); + assertTrue(props.contains("c=d")); + props = DeploymentPropertiesUtils.parseArgumentList("foo1=bar1 foo2=bar2 foo3=bar3 xxx3", " "); assertTrue(props.contains("foo1=bar1")); assertTrue(props.contains("foo2=bar2")); @@ -157,13 +159,51 @@ public void parseArgumentTestsWithQuotes() { assertTrue(props.contains("--foo=bar")); } + @Test + public void parseArgumentTestsWithMultipleQuotes() { + + List props = DeploymentPropertiesUtils.parseArgumentList("arg2=\"Argument 2\" arg3=val3", " "); + assertTrue(props.contains("arg2=\"Argument 2\"")); + assertTrue(props.contains("arg3=val3")); + + props = DeploymentPropertiesUtils.parseArgumentList("arg0=val0 arg1=val1 arg2=\"Argument 2\" arg3=val3", " "); + assertTrue(props.contains("arg0=val0")); + assertTrue(props.contains("arg1=val1")); + assertTrue(props.contains("arg2=\"Argument 2\"")); + assertTrue(props.contains("arg3=val3")); + + props = DeploymentPropertiesUtils.parseArgumentList("-arg1=val1 arg2=\"Argument 2\" arg3=val3", " "); + assertTrue(props.contains("-arg1=val1")); + assertTrue(props.contains("arg2=\"Argument 2\"")); + assertTrue(props.contains("arg3=val3")); + + props = DeploymentPropertiesUtils.parseArgumentList("-arg1=val1 arg2=\"Argument 2\" arg3=val3 arg4=\"Argument 4\"", " "); + assertTrue(props.contains("-arg1=val1")); + assertTrue(props.contains("arg2=\"Argument 2\"")); + assertTrue(props.contains("arg3=val3")); + assertTrue(props.contains("arg4=\"Argument 4\"")); + + props = DeploymentPropertiesUtils.parseArgumentList("-arg1=val1 arg2=\"Argument 2\" arg3=\"val3\" arg4=\"Argument 4\"", " "); + assertTrue(props.contains("-arg1=val1")); + assertTrue(props.contains("arg2=\"Argument 2\"")); + assertTrue(props.contains("arg3=\"val3\"")); + assertTrue(props.contains("arg4=\"Argument 4\"")); + + props = DeploymentPropertiesUtils.parseArgumentList("-arg1=\"val1\" arg2=\"Argument 2\" arg3=\"val3\" arg4=\"Argument 4\"", " "); + assertTrue(props.contains("-arg1=\"val1\"")); + assertTrue(props.contains("arg2=\"Argument 2\"")); + assertTrue(props.contains("arg3=\"val3\"")); + assertTrue(props.contains("arg4=\"Argument 4\"")); + + } + @Test public void testLongDeploymentPropertyValues() { Map props = DeploymentPropertiesUtils .parse("app.foo.bar=FoooooooooooooooooooooBar,app.foo" + ".bar2=FoooooooooooooooooooooBar"); - assertThat(props, hasEntry("app.foo.bar", "FoooooooooooooooooooooBar")); + assertThat(props.entrySet()).contains(entry("app.foo.bar", "FoooooooooooooooooooooBar")); props = DeploymentPropertiesUtils.parse("app.foo.bar=FooooooooooooooooooooooooooooooooooooooooooooooooooooBar"); - assertThat(props, hasEntry("app.foo.bar", "FooooooooooooooooooooooooooooooooooooooooooooooooooooBar")); + assertThat(props.entrySet()).contains(entry("app.foo.bar", "FooooooooooooooooooooooooooooooooooooooooooooooooooooBar")); } @Test @@ -177,10 +217,10 @@ public void testDeployerProperties() { props.put("deployer.myapp.precedence", "app"); Map result = DeploymentPropertiesUtils.extractAndQualifyDeployerProperties(props, "myapp"); - assertThat(result, hasEntry("spring.cloud.deployer.count", "2")); - assertThat(result, hasEntry("spring.cloud.deployer.foo", "bar")); - assertThat(result, hasEntry("spring.cloud.deployer.precedence", "app")); - assertThat(result, not(hasKey("app.myapp.foo"))); + 
assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.count", "2")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.foo", "bar")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.precedence", "app")); + assertThat(result.keySet()).doesNotContain("app.myapp.foo"); } @Test @@ -194,10 +234,10 @@ public void testDeployerPropertiesWithApp() { props.put("deployer.myapp.precedence", "app"); Map result = DeploymentPropertiesUtils.qualifyDeployerProperties(props, "myapp"); - assertThat(result, hasEntry("spring.cloud.deployer.count", "2")); - assertThat(result, hasEntry("spring.cloud.deployer.foo", "bar")); - assertThat(result, hasEntry("spring.cloud.deployer.precedence", "app")); - assertThat(result, hasKey("app.myapp.foo")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.count", "2")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.foo", "bar")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.precedence", "app")); + assertThat(result.keySet()).contains("app.myapp.foo"); } @Test @@ -225,11 +265,11 @@ public void testParseDeploymentProperties() throws IOException { FileCopyUtils.copy("app.foo1:\n bar1: spam".getBytes(), file); Map props = DeploymentPropertiesUtils.parseDeploymentProperties("app.foo2=bar2", file, 0); - assertThat(props.size(), is(1)); - assertThat(props.get("app.foo2"), is("bar2")); + assertThat(props.size()).isEqualTo(1); + assertThat(props.get("app.foo2")).isEqualTo("bar2"); props = DeploymentPropertiesUtils.parseDeploymentProperties("foo2=bar2", file, 1); - assertThat(props.size(), is(1)); - assertThat(props.get("app.foo1.bar1"), is("spam")); + assertThat(props.size()).isEqualTo(1); + assertThat(props.get("app.foo1.bar1")).isEqualTo("spam"); } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java index 5f6b8599ac..819538001c 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java @@ -21,15 +21,15 @@ import org.apache.http.auth.AuthScope; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.HttpClient; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.util.ReflectionUtils; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.*; /** * @author Gunnar Hillert + * @author Corneil du Plessis * @since 1.4 */ public class HttpClientConfigurerTests { @@ -70,7 +70,7 @@ public void testHttpClientWithProxyCreationWithMissingScheme() throws Exception builder.withProxyCredentials(URI.create("spring"), "spring", "cloud"); } catch (IllegalArgumentException e) { - Assert.assertEquals("The scheme component of the proxyUri must not be empty.", e.getMessage()); + assertEquals("The scheme component of the proxyUri must not be empty.", e.getMessage()); return; } fail("Expected an IllegalArgumentException to be thrown."); @@ -88,7 +88,7 @@ public void testHttpClientWithNullProxyUri() throws Exception { builder.withProxyCredentials(null, null, null); } catch (IllegalArgumentException e) { - Assert.assertEquals("The proxyUri must not be 
null.", e.getMessage()); + assertEquals("The proxyUri must not be null.", e.getMessage()); return; } fail("Expected an IllegalArgumentException to be thrown."); @@ -107,8 +107,8 @@ public void testThatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProvider final Field credentialsProviderField = ReflectionUtils.findField(HttpClientConfigurer.class, "credentialsProvider"); ReflectionUtils.makeAccessible(credentialsProviderField); CredentialsProvider credentialsProvider = (CredentialsProvider) credentialsProviderField.get(builder); - Assert.assertNotNull(credentialsProvider.getCredentials(new AuthScope("test.com", 80))); - Assert.assertNull(credentialsProvider.getCredentials(new AuthScope("spring.io", 80))); + assertNotNull(credentialsProvider.getCredentials(new AuthScope("test.com", 80))); + assertNull(credentialsProvider.getCredentials(new AuthScope("spring.io", 80))); } /** @@ -124,7 +124,7 @@ public void testThatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProvider final Field credentialsProviderField = ReflectionUtils.findField(HttpClientConfigurer.class, "credentialsProvider"); ReflectionUtils.makeAccessible(credentialsProviderField); CredentialsProvider credentialsProvider = (CredentialsProvider) credentialsProviderField.get(builder); - Assert.assertNotNull(credentialsProvider.getCredentials(new AuthScope("test.com", 80))); - Assert.assertNotNull(credentialsProvider.getCredentials(new AuthScope("spring.io", 80))); + assertNotNull(credentialsProvider.getCredentials(new AuthScope("test.com", 80))); + assertNotNull(credentialsProvider.getCredentials(new AuthScope("spring.io", 80))); } } diff --git a/spring-cloud-dataflow-schema-core/pom.xml b/spring-cloud-dataflow-schema-core/pom.xml new file mode 100644 index 0000000000..ac3306ec91 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/pom.xml @@ -0,0 +1,87 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-schema-core + spring-cloud-dataflow-schema-core + Data Flow Schema Core + + jar + + true + 3.4.1 + + + + org.springframework + spring-core + + + org.springframework + spring-context + compile + + + org.springframework.cloud + spring-cloud-task-batch + + + org.springframework.hateoas + spring-hateoas + + + org.slf4j + slf4j-api + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + + diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java new file mode 100644 index 0000000000..080b25639c --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java @@ -0,0 +1,256 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.schema; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + + +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.util.Assert; + +/** + * Contains the attributes of a {@link TaskExecution} as well as the name of the {@link SchemaVersionTarget}. + * + * @author Corneil du Plessis + */ +public class AggregateTaskExecution { + /** + * The unique id associated with the task execution. + */ + private long executionId; + + /** + * The parent task execution id. + */ + private Long parentExecutionId; + + /** + * The recorded exit code for the task. + */ + private Integer exitCode; + + /** + * User defined name for the task. + */ + private String taskName; + + /** + * Time of when the task was started. + */ + private Date startTime; + + /** + * Timestamp of when the task was completed/terminated. + */ + private Date endTime; + + /** + * Message returned from the task or stacktrace. + */ + private String exitMessage; + + /** + * Id assigned to the task by the platform. + * + * @since 1.1.0 + */ + private String externalExecutionId; + + /** + * Error information available upon the failure of a task. + * + * @since 1.1.0 + */ + private String errorMessage; + + private String schemaTarget; + + private String platformName; + + private String ctrTaskStatus; + /** + * The arguments that were used for this task execution. + */ + private List arguments; + + public AggregateTaskExecution() { + this.arguments = new ArrayList<>(); + } + + public AggregateTaskExecution(long executionId, Integer exitCode, String taskName, + Date startTime, Date endTime, String exitMessage, List arguments, + String errorMessage, String externalExecutionId, Long parentExecutionId, String platformName, + String ctrTaskStatus, String schemaTarget) { + + Assert.notNull(arguments, "arguments must not be null"); + this.executionId = executionId; + this.exitCode = exitCode; + this.taskName = taskName; + this.exitMessage = exitMessage; + this.arguments = new ArrayList<>(arguments); + this.startTime = (startTime != null) ? (Date) startTime.clone() : null; + this.endTime = (endTime != null) ? 
(Date) endTime.clone() : null; + this.errorMessage = errorMessage; + this.externalExecutionId = externalExecutionId; + this.parentExecutionId = parentExecutionId; + this.schemaTarget = schemaTarget; + this.platformName = platformName; + this.ctrTaskStatus = ctrTaskStatus; + } + + public AggregateTaskExecution(long executionId, Integer exitCode, String taskName, + Date startTime, Date endTime, String exitMessage, List arguments, + String errorMessage, String externalExecutionId, String platformName, String ctrTaskStatus, String schemaTarget) { + + this(executionId, exitCode, taskName, startTime, endTime, exitMessage, arguments, + errorMessage, externalExecutionId, null, platformName, ctrTaskStatus, schemaTarget); + } + + public long getExecutionId() { + return this.executionId; + } + + public Integer getExitCode() { + return this.exitCode; + } + + public void setExitCode(Integer exitCode) { + this.exitCode = exitCode; + } + + public String getTaskName() { + return this.taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public Date getStartTime() { + return (this.startTime != null) ? (Date) this.startTime.clone() : null; + } + + public void setStartTime(Date startTime) { + this.startTime = (startTime != null) ? (Date) startTime.clone() : null; + } + + public Date getEndTime() { + return (this.endTime != null) ? (Date) this.endTime.clone() : null; + } + + public void setEndTime(Date endTime) { + this.endTime = (endTime != null) ? (Date) endTime.clone() : null; + } + + public String getExitMessage() { + return this.exitMessage; + } + + public void setExitMessage(String exitMessage) { + this.exitMessage = exitMessage; + } + + public List getArguments() { + return this.arguments; + } + + public void setArguments(List arguments) { + this.arguments = new ArrayList<>(arguments); + } + + public String getErrorMessage() { + return this.errorMessage; + } + + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + public String getExternalExecutionId() { + return this.externalExecutionId; + } + + public void setExternalExecutionId(String externalExecutionId) { + this.externalExecutionId = externalExecutionId; + } + + public Long getParentExecutionId() { + return this.parentExecutionId; + } + + public void setParentExecutionId(Long parentExecutionId) { + this.parentExecutionId = parentExecutionId; + } + + public String getSchemaTarget() { + return schemaTarget; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } + + public String getPlatformName() { + return platformName; + } + + public void setPlatformName(String platformName) { + this.platformName = platformName; + } + + public String getCtrTaskStatus() { + return ctrTaskStatus; + } + + public void setCtrTaskStatus(String ctrTaskStatus) { + this.ctrTaskStatus = ctrTaskStatus; + } + + @Override + public String toString() { + return "AggregateTaskExecution{" + + "executionId=" + executionId + + ", parentExecutionId=" + parentExecutionId + + ", exitCode=" + exitCode + + ", taskName='" + taskName + '\'' + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", exitMessage='" + exitMessage + '\'' + + ", externalExecutionId='" + externalExecutionId + '\'' + + ", errorMessage='" + errorMessage + '\'' + + ", schemaTarget='" + schemaTarget + '\'' + + ", platformName='" + platformName + '\'' + + ", ctrTaskStatus='" + ctrTaskStatus + '\'' + + ", arguments=" + arguments + + '}'; + } + + public TaskExecution toTaskExecution() { 
+ return new TaskExecution(executionId, + exitCode, + taskName, + startTime, + endTime, + exitMessage, + arguments, + errorMessage, + externalExecutionId, + parentExecutionId + ); + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersion.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersion.java new file mode 100644 index 0000000000..8aba709aab --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersion.java @@ -0,0 +1,63 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema; + +import java.util.Arrays; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + +/** + * Defines the possible schema versions that currently map to Spring {@code "Boot"}. A registered application can only support one schema version. + * + *
+ *     Each value defines the supported Spring Boot version that represents the changes in the schemas of Spring Batch and Task.
    + * + * @author Chris Bono + * @author Corneil du Plessis + */ +@JsonSerialize(using = AppBootSchemaVersionSerializer.class) +@JsonDeserialize(using = AppBootSchemaVersionDeserializer.class) +public enum AppBootSchemaVersion { + + BOOT2("2"), + BOOT3("3"); + + private String bootVersion; + + AppBootSchemaVersion(String bootVersion) { + this.bootVersion = bootVersion; + } + + public static AppBootSchemaVersion defaultVersion() { + return BOOT2; + } + + public static AppBootSchemaVersion fromBootVersion(String bootVersion) { + return Arrays.stream(AppBootSchemaVersion.values()) + .filter((bv) -> bv.bootVersion.equals(bootVersion)) + .findFirst().orElseThrow(() -> new IllegalArgumentException("Invalid AppBootSchemaVersion: " + bootVersion)); + } + + public String getBootVersion() { + return this.bootVersion; + } + + @Override + public String toString() { + return "AppBootVersion{bootVersion='" + this.bootVersion + "'}"; + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionDeserializer.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionDeserializer.java new file mode 100644 index 0000000000..4d06fab996 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionDeserializer.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema; + +import java.io.IOException; + +import com.fasterxml.jackson.core.JacksonException; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; + +/** + * Deserialize AppBootSchemaVersion with Jackson + * @author Corneil du Plessis + */ +public class AppBootSchemaVersionDeserializer extends StdDeserializer { + public AppBootSchemaVersionDeserializer() { + super(AppBootSchemaVersion.class); + } + + public AppBootSchemaVersionDeserializer(Class vc) { + super(vc); + } + + @Override + public AppBootSchemaVersion deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JacksonException { + String value = jsonParser.getValueAsString(); + return value != null ? AppBootSchemaVersion.fromBootVersion(value) : null; + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionSerializer.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionSerializer.java new file mode 100644 index 0000000000..1b612346ca --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionSerializer.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 the original author or authors. 
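As a quick illustration of the enum just added (hypothetical caller code; the values and the exception message are taken from the class itself):

// Resolve a schema version from a registered app's Boot version string.
AppBootSchemaVersion v2 = AppBootSchemaVersion.fromBootVersion("2"); // BOOT2
AppBootSchemaVersion v3 = AppBootSchemaVersion.fromBootVersion("3"); // BOOT3

// Any other value (e.g. "Boot3", "boot2", null) is rejected:
// fromBootVersion("Boot3") -> IllegalArgumentException("Invalid AppBootSchemaVersion: Boot3")

// BOOT2 is the declared default until a target says otherwise.
AppBootSchemaVersion def = AppBootSchemaVersion.defaultVersion(); // BOOT2
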
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema; + +import java.io.IOException; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; + +/** + * Serialize AppBootSchemaVersion with Jackson + * @author Corneil du Plessis + */ +public class AppBootSchemaVersionSerializer extends StdSerializer { + public AppBootSchemaVersionSerializer() { + super(AppBootSchemaVersion.class); + } + + protected AppBootSchemaVersionSerializer(Class t) { + super(t); + } + + @Override + public void serialize(AppBootSchemaVersion appBootSchemaVersion, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { + if(appBootSchemaVersion != null) { + jsonGenerator.writeString(appBootSchemaVersion.getBootVersion()); + } else { + jsonGenerator.writeNull(); + } + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersions.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersions.java new file mode 100644 index 0000000000..780b2990ea --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersions.java @@ -0,0 +1,60 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema; + +import java.util.List; + +/** + * Will provide response to list all schema versions supported along with the default. 
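Because the enum carries @JsonSerialize/@JsonDeserialize pointing at the two classes above, a plain ObjectMapper round-trips it as the bare version string. A minimal sketch, assuming an unconfigured mapper:

ObjectMapper mapper = new ObjectMapper();
String json = mapper.writeValueAsString(AppBootSchemaVersion.BOOT3); // the JSON string "3"
AppBootSchemaVersion restored = mapper.readValue(json, AppBootSchemaVersion.class); // BOOT3
// A null value is written as JSON null by AppBootSchemaVersionSerializer.
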
+ * @author Corneil du Plessis + */ +public class AppBootSchemaVersions { + private AppBootSchemaVersion defaultSchemaVersion; + private List versions; + + public AppBootSchemaVersions() { + } + + public AppBootSchemaVersions(AppBootSchemaVersion defaultSchemaVersion, List versions) { + this.defaultSchemaVersion = defaultSchemaVersion; + this.versions = versions; + } + + public AppBootSchemaVersion getDefaultSchemaVersion() { + return defaultSchemaVersion; + } + + public void setDefaultSchemaVersion(AppBootSchemaVersion defaultSchemaVersion) { + this.defaultSchemaVersion = defaultSchemaVersion; + } + + public List getVersions() { + return versions; + } + + public void setVersions(List versions) { + this.versions = versions; + } + + @Override + public String toString() { + return "AppBootSchemaVersions{" + + "defaultSchemaVersion=" + defaultSchemaVersion + + ", versions=" + versions + + '}'; + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootVersionConverter.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootVersionConverter.java new file mode 100644 index 0000000000..f385847dbd --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootVersionConverter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2015-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema; + +import org.springframework.core.convert.converter.Converter; + +/** + * Converts strings to {@link AppBootSchemaVersion} + * + * @author Chris Bono + * @author Corneil du Plessis + */ +public class AppBootVersionConverter implements Converter { + + @Override + public AppBootSchemaVersion convert(String value) { + return value != null ? AppBootSchemaVersion.fromBootVersion(value) : null; + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTarget.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTarget.java new file mode 100644 index 0000000000..e1ce7f9d98 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTarget.java @@ -0,0 +1,117 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
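The AppBootVersionConverter above fits Spring's Converter SPI; a hedged sketch of registering it with a DefaultConversionService (the registration site is an assumption, not part of this patch):

DefaultConversionService conversionService = new DefaultConversionService();
conversionService.addConverter(new AppBootVersionConverter());
AppBootSchemaVersion version = conversionService.convert("3", AppBootSchemaVersion.class); // BOOT3
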
+ */ + +package org.springframework.cloud.dataflow.schema; + +import java.util.Objects; + +/** + * This represents the combination of BootSchemaVersion and prefixes for the various schemas. + * @author Corneil du Plessis + */ +public class SchemaVersionTarget { + private String name; + private AppBootSchemaVersion schemaVersion; + private String taskPrefix; + private String batchPrefix; + private String datasource; + + public SchemaVersionTarget() { + } + + public SchemaVersionTarget(String name, AppBootSchemaVersion schemaVersion, String taskPrefix, String batchPrefix, String datasource) { + this.name = name; + this.schemaVersion = schemaVersion; + this.taskPrefix = taskPrefix; + this.batchPrefix = batchPrefix; + this.datasource = datasource; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public AppBootSchemaVersion getSchemaVersion() { + return schemaVersion == null ? AppBootSchemaVersion.defaultVersion() : schemaVersion; + } + + public void setSchemaVersion(AppBootSchemaVersion schemaVersion) { + this.schemaVersion = schemaVersion; + } + + public String getTaskPrefix() { + return taskPrefix; + } + + public void setTaskPrefix(String taskPrefix) { + this.taskPrefix = taskPrefix; + } + + public String getBatchPrefix() { + return batchPrefix; + } + + public void setBatchPrefix(String batchPrefix) { + this.batchPrefix = batchPrefix; + } + + public String getDatasource() { + return datasource; + } + + public void setDatasource(String datasource) { + this.datasource = datasource; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SchemaVersionTarget that = (SchemaVersionTarget) o; + + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return name != null ? name.hashCode() : 0; + } + public static SchemaVersionTarget createDefault(AppBootSchemaVersion schemaVersion) { + if(schemaVersion.equals(AppBootSchemaVersion.defaultVersion())) { + return new SchemaVersionTarget(schemaVersion.name().toLowerCase(), schemaVersion, "TASK_", "BATCH_", null); + } + return new SchemaVersionTarget(schemaVersion.name().toLowerCase(), schemaVersion, schemaVersion.name() + "_TASK_", schemaVersion.name() + "_BATCH_", null); + } + public static SchemaVersionTarget defaultTarget() { + return createDefault(AppBootSchemaVersion.defaultVersion()); + } + + @Override + public String toString() { + return "SchemaVersionTarget{" + + "name='" + name + '\'' + + ", schemaVersion=" + schemaVersion + + ", taskPrefix='" + taskPrefix + '\'' + + ", batchPrefix='" + batchPrefix + '\'' + + ", datasource='" + datasource + '\'' + + '}'; + } +} diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTargets.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTargets.java new file mode 100644 index 0000000000..92c3a75a25 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTargets.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
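Following createDefault above, the default (Boot 2) target keeps the classic table prefixes while any other version gets version-qualified ones; for example:

SchemaVersionTarget boot2 = SchemaVersionTarget.defaultTarget();
// boot2.getName() == "boot2", getTaskPrefix() == "TASK_", getBatchPrefix() == "BATCH_"

SchemaVersionTarget boot3 = SchemaVersionTarget.createDefault(AppBootSchemaVersion.BOOT3);
// boot3.getName() == "boot3", getTaskPrefix() == "BOOT3_TASK_", getBatchPrefix() == "BOOT3_BATCH_"
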
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema; + +import java.util.List; + +/** + * Will be the response to provide list of schema targets along with the name of the default. + * @author Corneil du Plessis + */ +public class SchemaVersionTargets { + private String defaultSchemaTarget; + private List schemas; + + public SchemaVersionTargets(String defaultSchemaTarget, List schemas) { + this.defaultSchemaTarget = defaultSchemaTarget; + this.schemas = schemas; + } + + public String getDefaultSchemaTarget() { + return defaultSchemaTarget; + } + + public void setDefaultSchemaTarget(String defaultSchemaTarget) { + this.defaultSchemaTarget = defaultSchemaTarget; + } + + public List getSchemas() { + return schemas; + } + + public void setSchemas(List schemas) { + this.schemas = schemas; + } + + @Override + public String toString() { + return "SchemaVersionTargets{" + + "defaultSchemaTarget='" + defaultSchemaTarget + '\'' + + ", schemas=" + schemas + + '}'; + } +} diff --git a/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java b/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java new file mode 100644 index 0000000000..6aa2b89422 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.schema; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.NullAndEmptySource; +import org.junit.jupiter.params.provider.ValueSource; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; + +/** + * Unit tests for {@link AppBootSchemaVersion}. 
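Tying the last two types together, a targets payload could be assembled like this (illustrative wiring only; the real server-side assembly is not part of this hunk):

List<SchemaVersionTarget> schemas = Arrays.stream(AppBootSchemaVersion.values())
        .map(SchemaVersionTarget::createDefault)
        .collect(Collectors.toList());
SchemaVersionTargets targets = new SchemaVersionTargets(SchemaVersionTarget.defaultTarget().getName(), schemas);
// targets.getDefaultSchemaTarget() == "boot2"
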
diff --git a/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java b/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java new file mode 100644 index 0000000000..6aa2b89422 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.schema; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.NullAndEmptySource; +import org.junit.jupiter.params.provider.ValueSource; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; + +/** + * Unit tests for {@link AppBootSchemaVersion}. + * + * @author Chris Bono + * @author Corneil du Plessis + */ +public class AppBootSchemaVersionTests { + + @Test + void bootVersion2() { + assertThat(AppBootSchemaVersion.BOOT2.getBootVersion()).isEqualTo("2"); + } + + @Test + void bootVersion3() { + assertThat(AppBootSchemaVersion.BOOT3.getBootVersion()).isEqualTo("3"); + } + + @Test + void fromBootVersionWithValidValues() { + assertThat(AppBootSchemaVersion.fromBootVersion("2")).isEqualTo(AppBootSchemaVersion.BOOT2); + assertThat(AppBootSchemaVersion.fromBootVersion("3")).isEqualTo(AppBootSchemaVersion.BOOT3); + assertThat(AppBootSchemaVersion.defaultVersion()).isEqualTo(AppBootSchemaVersion.fromBootVersion(AppBootSchemaVersion.defaultVersion().getBootVersion())); + } + + @ParameterizedTest + @NullAndEmptySource + @ValueSource(strings = { "Boot2", "boot2", "BOOT2", "foo", "Boot3", "boot3", "BOOT3" }) + void fromBootVersionWithInvalidValues(String invalidBootVersion) { + assertThatIllegalArgumentException() + .isThrownBy(() -> AppBootSchemaVersion.fromBootVersion(invalidBootVersion)) + .withMessage("Invalid AppBootSchemaVersion: %s", invalidBootVersion); + } +}
diff --git a/spring-cloud-dataflow-schema/pom.xml b/spring-cloud-dataflow-schema/pom.xml new file mode 100644 index 0000000000..41e9ef881b --- /dev/null +++ b/spring-cloud-dataflow-schema/pom.xml @@ -0,0 +1,96 @@ +<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-parent</artifactId> + <version>2.11.6-SNAPSHOT</version> + <relativePath>../spring-cloud-dataflow-parent</relativePath> + </parent> + <artifactId>spring-cloud-dataflow-schema</artifactId> + <name>spring-cloud-dataflow-schema</name> + <description>Data Flow Schema</description> + <packaging>jar</packaging> + <properties> + <!-- element name lost in extraction; value was "true" --> + <maven-javadoc-plugin.version>3.4.1</maven-javadoc-plugin.version> + </properties> + <dependencies> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-core</artifactId> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-context</artifactId> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-task-batch</artifactId> + </dependency> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-dataflow-schema-core</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.springframework.hateoas</groupId> + <artifactId>spring-hateoas</artifactId> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + <dependency> + <groupId>javax.annotation</groupId> + <artifactId>javax.annotation-api</artifactId> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-annotations</artifactId> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-test</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-javadoc-plugin</artifactId> + <version>${maven-javadoc-plugin.version}</version> + <executions> + <execution> + <id>javadoc</id> + <goals> + <goal>jar</goal> + </goals> + <phase>package</phase> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-source-plugin</artifactId> + <version>3.3.0</version> + <executions> + <execution> + <id>source</id> + <goals> + <goal>jar</goal> + </goals> + <phase>package</phase> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project>
diff --git a/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaService.java b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaService.java new file mode 100644 index 0000000000..00cdaa9c3d --- /dev/null +++ b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaService.java @@ -0,0 +1,33 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.springframework.cloud.dataflow.schema.service; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; + +/** + * Schema service provides information about Spring Boot schema versions along with all targets and defaults. + * @author Corneil du Plessis + */ +public interface SchemaService { + AppBootSchemaVersions getVersions(); + + SchemaVersionTargets getTargets(); + + SchemaVersionTarget getTarget(String name); +} diff --git a/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java new file mode 100644 index 0000000000..109ddee147 --- /dev/null +++ b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema.service; + +import javax.annotation.PostConstruct; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Configuration for schema service and related components. + * @author Corneil du Plessis + */ +@Configuration +public class SchemaServiceConfiguration { + private static final Logger logger = LoggerFactory.getLogger(SchemaServiceConfiguration.class); + @Bean + public SchemaService schemaService() { + logger.info("schemaService:starting"); + try { + return new DefaultSchemaService(); + } finally { + logger.info("schemaService:started"); + } + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration"); + } +} diff --git a/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/impl/DefaultSchemaService.java b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/impl/DefaultSchemaService.java new file mode 100644 index 0000000000..9cae739434 --- /dev/null +++ b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/impl/DefaultSchemaService.java @@ -0,0 +1,76 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema.service.impl; + +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.stereotype.Service; + +/** + * Implements a simple service to provide schema versions and targets. + * In the future this will use a database to store the {@link SchemaVersionTarget} entries. + * @author Corneil du Plessis + */ +public class DefaultSchemaService implements SchemaService { + private static final Logger logger = LoggerFactory.getLogger(DefaultSchemaService.class); + private final Map<String, SchemaVersionTarget> targets; + + public DefaultSchemaService() { + targets = Arrays.stream(AppBootSchemaVersion.values()) + .map(SchemaVersionTarget::createDefault) + .collect(Collectors.toMap(SchemaVersionTarget::getName, Function.identity())); + } + + @Override + public AppBootSchemaVersions getVersions() { + return new AppBootSchemaVersions(AppBootSchemaVersion.defaultVersion(), Arrays.asList(AppBootSchemaVersion.values())); + } + + @Override + public SchemaVersionTargets getTargets() { + return new SchemaVersionTargets(getDefaultSchemaTarget(), new ArrayList<>(targets.values())); + } + + private static String getDefaultSchemaTarget() { + return AppBootSchemaVersion.defaultVersion().name().toLowerCase(); + } + + @Override + public SchemaVersionTarget getTarget(String name) { + if (name == null) { + name = getDefaultSchemaTarget(); + } + return targets.get(name); + } + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService"); + } +}
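[Editorial note, not part of the patch: a minimal sketch of consuming the service above. In a real context the SchemaService bean would come from SchemaServiceConfiguration rather than direct instantiation; the target names and prefixes follow SchemaVersionTarget.createDefault.]

    SchemaService schemaService = new DefaultSchemaService();
    SchemaVersionTargets targets = schemaService.getTargets();    // contains the "boot2" and "boot3" targets
    SchemaVersionTarget boot3 = schemaService.getTarget("boot3"); // prefixes "BOOT3_TASK_" / "BOOT3_BATCH_"
    SchemaVersionTarget dflt = schemaService.getTarget(null);     // null falls back to the default target name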
diff --git a/spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema/service/impl/DefaultSchemaServiceTests.java b/spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema/service/impl/DefaultSchemaServiceTests.java new file mode 100644 index 0000000000..463441df7a --- /dev/null +++ b/spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema/service/impl/DefaultSchemaServiceTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.schema.service.impl; + + +import java.util.HashSet; + + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Tests targeting {@link SchemaService} and the implementation {@link DefaultSchemaService}. + * @author Corneil du Plessis + */ +public class DefaultSchemaServiceTests { + + protected SchemaService schemaService = new DefaultSchemaService(); + @Test + public void testVersions() { + // when + AppBootSchemaVersions versions = schemaService.getVersions(); + // then + assertThat(versions).isNotNull(); + assertThat(versions.getDefaultSchemaVersion()).isEqualTo(AppBootSchemaVersion.defaultVersion()); + assertThat(versions.getVersions().size()).isEqualTo(AppBootSchemaVersion.values().length); + assertThat(new HashSet<>(versions.getVersions()).size()).isEqualTo(AppBootSchemaVersion.values().length); + } + @Test + public void testTargets() { + // when + SchemaVersionTargets targets = schemaService.getTargets(); + // then + assertThat(targets).isNotNull(); + assertThat(targets.getDefaultSchemaTarget()).isEqualTo(AppBootSchemaVersion.defaultVersion().name().toLowerCase()); + assertThat(targets.getSchemas().size()).isEqualTo(AppBootSchemaVersion.values().length); + for(final AppBootSchemaVersion schemaVersion: AppBootSchemaVersion.values()) { + assertThat(targets.getSchemas().stream().filter(t -> t.getSchemaVersion() == schemaVersion).findFirst()).isPresent(); + } + } + @Test + public void testBoot3Target() { + // when + SchemaVersionTarget target = schemaService.getTarget("boot3"); + // then + assertThat(target).isNotNull(); + assertThat(target.getSchemaVersion()).isEqualTo(AppBootSchemaVersion.BOOT3); + assertThat(target.getBatchPrefix()).isEqualTo("BOOT3_BATCH_"); + assertThat(target.getTaskPrefix()).isEqualTo("BOOT3_TASK_"); + } + @Test + public void testInvalidTarget() { + // when + SchemaVersionTarget target = schemaService.getTarget("1"); + // then + assertThat(target).isNull(); + } +}
diff --git a/spring-cloud-dataflow-server-core/pom.xml b/spring-cloud-dataflow-server-core/pom.xml index 13ae1378ff..bbb0c7a885 100644 --- a/spring-cloud-dataflow-server-core/pom.xml +++ b/spring-cloud-dataflow-server-core/pom.xml @@ -1,13 +1,22 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.9.2-SNAPSHOT + 2.11.6-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-server-core + spring-cloud-dataflow-server-core + Data Flow Server Core + jar + + true + 3.4.1 + io.micrometer micrometer-core @@ -25,10 +34,29 @@ io.micrometer.prometheus prometheus-rsocket-spring - + + org.hibernate + hibernate-micrometer + + + org.springframework.cloud + spring-cloud-dataflow-schema + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-dataflow-aggregate-task + ${dataflow.version} + org.springframework.cloud spring-cloud-dataflow-common-flyway + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-dataflow-common-persistence + 
${dataflow.version} com.jayway.jsonpath @@ -39,6 +67,10 @@ com.zaxxer HikariCP + + org.springdoc + springdoc-openapi-ui + org.springframework.boot spring-boot-starter-data-jpa @@ -46,14 +78,17 @@ org.springframework.cloud spring-cloud-dataflow-configuration-metadata + ${project.version} org.springframework.cloud spring-cloud-dataflow-completion + ${project.version} org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud @@ -62,10 +97,12 @@ org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.cloud spring-cloud-dataflow-registry + ${project.version} org.springframework.cloud @@ -85,13 +122,8 @@ org.springframework.cloud - spring-cloud-starter-common-security-config-web - - - org.codehaus.jackson - jackson-mapper-asl - - + spring-cloud-common-security-config-web + ${project.version} org.springframework.boot @@ -120,9 +152,10 @@ - org.springframework.boot - spring-boot-starter-test - test + com.h2database + h2 + true + provided org.springframework.cloud @@ -131,20 +164,11 @@ org.springframework.cloud spring-cloud-task-batch - test - - - org.skyscreamer - jsonassert - test - - - com.h2database - h2 org.mariadb.jdbc mariadb-java-client + [3.1.2,) org.postgresql @@ -169,14 +193,17 @@ org.springframework.cloud spring-cloud-skipper-client + ${project.version} org.springframework.cloud spring-cloud-skipper + ${project.version} org.springframework.cloud spring-cloud-dataflow-audit + ${project.version} compile @@ -184,11 +211,36 @@ jsr305 provided + + org.springframework.boot + spring-boot-starter-test + test + org.springframework.batch spring-batch-test test + + org.awaitility + awaitility + test + + + org.testcontainers + junit-jupiter + test + + + org.testcontainers + mariadb + test + + + org.testcontainers + postgresql + test + @@ -207,5 +259,50 @@ + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + build-info + + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + + diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/AllInOneExecutionContextSerializer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/AllInOneExecutionContextSerializer.java new file mode 100644 index 0000000000..f855e3cc44 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/AllInOneExecutionContextSerializer.java @@ -0,0 +1,65 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.batch; + +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer; + +import java.io.*; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; + +/** + * Implements the same logic as used in Batch 5.x. + * @author Corneil du Plessis + */ +public class AllInOneExecutionContextSerializer extends Jackson2ExecutionContextStringSerializer { + private final static Logger logger = LoggerFactory.getLogger(AllInOneExecutionContextSerializer.class); + @SuppressWarnings({"unchecked", "NullableProblems"}) + @Override + public Map<String, Object> deserialize(InputStream inputStream) throws IOException { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + IOUtils.copy(inputStream, buffer); + Map<String, Object> result = new HashMap<>(); + // Try Jackson + try { + return super.deserialize(new ByteArrayInputStream(buffer.toByteArray())); + } catch (Throwable x) { + result.put("context.deserialize.error.jackson", x.toString()); + } + InputStream decodingStream = new ByteArrayInputStream(buffer.toByteArray()); + try { + // Try decode base64 + decodingStream = Base64.getDecoder().wrap(decodingStream); + } catch (Throwable x) { + // Use original input for java deserialization + decodingStream = new ByteArrayInputStream(buffer.toByteArray()); + result.put("context.deserialize.error.base64.decode", x.toString()); + } + try { + ObjectInputStream objectInputStream = new ObjectInputStream(decodingStream); + return (Map<String, Object>) objectInputStream.readObject(); + } catch (Throwable x) { + result.put("context.deserialize.error.java.deserialization", x.toString()); + } + // They may have a custom serializer or custom classes. + logger.warn("deserialization failed:{}", result); + return result; + } +}
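[Editorial note, not part of the patch: a small sketch of the fallback chain implemented above, Jackson first, then Base64-wrapped Java serialization. It uses only JDK types plus the new class; java.io and java.util imports are assumed, and the surrounding method is assumed to declare throws IOException.]

    HashMap<String, Object> ctx = new HashMap<>();
    ctx.put("step.count", 42L);
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(Base64.getEncoder().wrap(bytes))) {
        out.writeObject(ctx); // legacy format: Base64 over Java serialization
    }
    Map<String, Object> restored = new AllInOneExecutionContextSerializer()
            .deserialize(new ByteArrayInputStream(bytes.toByteArray()));
    // Jackson cannot parse this payload, so the serializer falls back and restores the original map.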
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/BatchVersion.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/BatchVersion.java new file mode 100644 index 0000000000..9afd95f24c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/BatchVersion.java @@ -0,0 +1,37 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.batch; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.util.Assert; + +/** + * Enumerates the Spring Batch schema versions that need to be supported. + * @author Corneil du Plessis + */ +public enum BatchVersion { + BATCH_4, + BATCH_5; + public static BatchVersion from(AppBootSchemaVersion bootSchemaVersion) { + Assert.notNull(bootSchemaVersion, "bootSchemaVersion required"); + return AppBootSchemaVersion.BOOT3.equals(bootSchemaVersion) ? BATCH_5 : BATCH_4; + } + public static BatchVersion from(SchemaVersionTarget versionTarget) { + Assert.notNull(versionTarget, "versionTarget required"); + return from(versionTarget.getSchemaVersion()); + } +}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java index fdb84fd86c..4f02b6817d 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java @@ -17,59 +17,71 @@ import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Timestamp; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; - +import java.util.Set; import javax.sql.DataSource; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; import org.springframework.batch.item.database.Order; import org.springframework.batch.item.database.PagingQueryProvider; import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.cloud.dataflow.server.converter.StringToDateConverter; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowCallbackHandler; import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * @author Dave Syer * @author Michael Minella * @author Glenn Renfro + * @author Corneil du Plessis * */ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implements SearchableJobExecutionDao { + private static final String FIND_PARAMS_FROM_ID_5 = "SELECT JOB_EXECUTION_ID, PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING FROM %PREFIX%JOB_EXECUTION_PARAMS WHERE JOB_EXECUTION_ID = ?"; + private static final String GET_COUNT = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION"; - private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID 
and I.JOB_NAME=?"; + private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where I.JOB_NAME=?"; - private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.STATUS = ?"; + private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where E.STATUS = ?"; - private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? AND E.STATUS = ?"; + private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where I.JOB_NAME=? AND E.STATUS = ?"; private static final String FIELDS = "E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, " + "E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, I.JOB_INSTANCE_ID, I.JOB_NAME"; - private static final String FIELDS_WITH_STEP_COUNT = FIELDS + - ", (SELECT COUNT(*) FROM %PREFIX%STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID) as STEP_COUNT"; - + private static final String FIELDS_WITH_STEP_COUNT = FIELDS + + ", (SELECT COUNT(*) FROM %PREFIX%STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID) as STEP_COUNT"; private static final String GET_RUNNING_EXECUTIONS = "SELECT " + FIELDS - + " from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.END_TIME is NULL"; + + " from %PREFIX%JOB_EXECUTION E JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where E.END_TIME is NULL"; private static final String NAME_FILTER = "I.JOB_NAME LIKE ?"; @@ -81,10 +93,35 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private static final String NAME_AND_STATUS_FILTER = "I.JOB_NAME LIKE ? AND E.STATUS = ?"; - private static final String TASK_EXECUTION_ID_FILTER = - "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.TASK_EXECUTION_ID = ?"; + private static final String TASK_EXECUTION_ID_FILTER = "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.TASK_EXECUTION_ID = ?"; + + private static final String FIND_JOB_EXECUTIONS_4 = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION, JOB_CONFIGURATION_LOCATION" + + " from %PREFIX%JOB_EXECUTION where JOB_INSTANCE_ID = ? order by JOB_EXECUTION_ID desc"; + + private static final String FIND_JOB_EXECUTIONS_5 = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION" + + " from %PREFIX%JOB_EXECUTION where JOB_INSTANCE_ID = ? order by JOB_EXECUTION_ID desc"; + + private static final String GET_LAST_EXECUTION_4 = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION, JOB_CONFIGURATION_LOCATION" + + " from %PREFIX%JOB_EXECUTION E where JOB_INSTANCE_ID = ? 
and JOB_EXECUTION_ID in (SELECT max(JOB_EXECUTION_ID) from %PREFIX%JOB_EXECUTION E2 where E2.JOB_INSTANCE_ID = ?)"; + + private static final String GET_LAST_EXECUTION_5 = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION" + + " from %PREFIX%JOB_EXECUTION E where JOB_INSTANCE_ID = ? and JOB_EXECUTION_ID in (SELECT max(JOB_EXECUTION_ID) from %PREFIX%JOB_EXECUTION E2 where E2.JOB_INSTANCE_ID = ?)"; - private static final String FROM_CLAUSE_TASK_TASK_BATCH = "TASK_TASK_BATCH B"; + private static final String GET_RUNNING_EXECUTIONS_4 = "SELECT E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, " + + "E.JOB_INSTANCE_ID, E.JOB_CONFIGURATION_LOCATION from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? and E.START_TIME is not NULL and E.END_TIME is NULL order by E.JOB_EXECUTION_ID desc"; + + private static final String GET_RUNNING_EXECUTIONS_5 = "SELECT E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, " + + "E.JOB_INSTANCE_ID from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? and E.START_TIME is not NULL and E.END_TIME is NULL order by E.JOB_EXECUTION_ID desc"; + + private static final String GET_EXECUTION_BY_ID_4 = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION, JOB_CONFIGURATION_LOCATION" + + " from %PREFIX%JOB_EXECUTION where JOB_EXECUTION_ID = ?"; + + private static final String GET_EXECUTION_BY_ID_5 = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION" + + " from %PREFIX%JOB_EXECUTION where JOB_EXECUTION_ID = ?"; + + private static final String FROM_CLAUSE_TASK_TASK_BATCH = "%TASK_PREFIX%TASK_BATCH B"; + + private static final String GET_JOB_EXECUTIONS_BY_TASK_IDS = "SELECT JOB_EXECUTION_ID, TASK_EXECUTION_ID from %TASK_PREFIX%TASK_BATCH WHERE TASK_EXECUTION_ID in (?)"; private PagingQueryProvider allExecutionsPagingQueryProvider; @@ -92,7 +129,7 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private PagingQueryProvider byStatusPagingQueryProvider; - private PagingQueryProvider byJobNameAndStatusPagingQueryProvider; + private PagingQueryProvider byJobNameAndStatusPagingQueryProvider; private PagingQueryProvider byJobNameWithStepCountPagingQueryProvider; @@ -104,8 +141,24 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private PagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider; + private final ConfigurableConversionService conversionService; + private DataSource dataSource; + private BatchVersion batchVersion; + private String taskTablePrefix; + + public JdbcSearchableJobExecutionDao() { + this(BatchVersion.BATCH_4); + } + + @SuppressWarnings("deprecation") + public JdbcSearchableJobExecutionDao(BatchVersion batchVersion) { + this.batchVersion = batchVersion; + conversionService = new DefaultConversionService(); + conversionService.addConverter(new StringToDateConverter()); + } + /** * @param dataSource the dataSource to set */ @@ -113,6 +166,10 @@ public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } + public void setTaskTablePrefix(String taskTablePrefix) { + this.taskTablePrefix = taskTablePrefix; + } + /** 
* @see JdbcJobExecutionDao#afterPropertiesSet() */ @@ -142,12 +199,64 @@ protected long getNextKey() { byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, JOB_INSTANCE_ID_FILTER); byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, - FROM_CLAUSE_TASK_TASK_BATCH, TASK_EXECUTION_ID_FILTER); + getTaskQuery(FROM_CLAUSE_TASK_TASK_BATCH), TASK_EXECUTION_ID_FILTER); super.afterPropertiesSet(); } + protected String getTaskQuery(String base) { + return StringUtils.replace(base, "%TASK_PREFIX%", taskTablePrefix); + } + @Override + public List<JobExecution> findJobExecutions(JobInstance job) { + Assert.notNull(job, "Job cannot be null."); + Assert.notNull(job.getId(), "Job Id cannot be null."); + + String sqlQuery = batchVersion.equals(BatchVersion.BATCH_4) ? FIND_JOB_EXECUTIONS_4 : FIND_JOB_EXECUTIONS_5; + return getJdbcTemplate().query(getQuery(sqlQuery), new JobExecutionRowMapper(batchVersion, job), job.getId()); + + } + + @Override + public JobExecution getLastJobExecution(JobInstance jobInstance) { + Long id = jobInstance.getId(); + String sqlQuery = batchVersion.equals(BatchVersion.BATCH_4) ? GET_LAST_EXECUTION_4 : GET_LAST_EXECUTION_5; + List<JobExecution> executions = getJdbcTemplate().query(getQuery(sqlQuery), + new JobExecutionRowMapper(batchVersion, jobInstance), id, id); + + Assert.state(executions.size() <= 1, "There must be at most one latest job execution"); + + if (executions.isEmpty()) { + return null; + } + else { + return executions.get(0); + } + } + + @Override + public Set<JobExecution> findRunningJobExecutions(String jobName) { + Set<JobExecution> result = new HashSet<>(); + String sqlQuery = batchVersion.equals(BatchVersion.BATCH_4) ? GET_RUNNING_EXECUTIONS_4 + : GET_RUNNING_EXECUTIONS_5; + result.addAll(getJdbcTemplate().query(getQuery(sqlQuery), new JobExecutionRowMapper(batchVersion), jobName)); + + return result; + } + + @Override + public JobExecution getJobExecution(Long executionId) { + try { + String sqlQuery = batchVersion.equals(BatchVersion.BATCH_4) ? GET_EXECUTION_BY_ID_4 : GET_EXECUTION_BY_ID_5; + return getJdbcTemplate().queryForObject(getQuery(sqlQuery), new JobExecutionRowMapper(batchVersion), + executionId); + } + catch (EmptyResultDataAccessException e) { + return null; + } + } +
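[Editorial note, not part of the patch: a minimal wiring sketch for the versioned DAO above. setTablePrefix comes from the Spring Batch base class; dataSource is an assumed javax.sql.DataSource, and the prefixes are the ones SchemaVersionTarget.createDefault produces.]

    SchemaVersionTarget target = SchemaVersionTarget.createDefault(AppBootSchemaVersion.BOOT3);
    JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao(BatchVersion.from(target));
    dao.setDataSource(dataSource);
    dao.setTablePrefix(target.getBatchPrefix());    // "BOOT3_BATCH_" replaces %PREFIX% in the queries
    dao.setTaskTablePrefix(target.getTaskPrefix()); // "BOOT3_TASK_" replaces %TASK_PREFIX% in the queries
    dao.afterPropertiesSet();                       // declared to throw Exception
    Set<JobExecution> running = dao.findRunningJobExecutions("myJob");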
/** * @return a {@link PagingQueryProvider} for all job executions * @throws Exception if page provider is not created. @@ -157,8 +266,8 @@ private PagingQueryProvider getPagingQueryProvider() throws Exception { } /** - * @return a {@link PagingQueryProvider} for all job executions with the - * provided where clause + * @return a {@link PagingQueryProvider} for all job executions with the provided + * where clause * @throws Exception if page provider is not created. */ private PagingQueryProvider getPagingQueryProvider(String whereClause) throws Exception { @@ -166,8 +275,7 @@ private PagingQueryProvider getPagingQueryProvider(String whereClause) throws Ex } /** - * @return a {@link PagingQueryProvider} with a where clause to narrow the - * query + * @return a {@link PagingQueryProvider} with a where clause to narrow the query * @throws Exception if page provider is not created. */ private PagingQueryProvider getPagingQueryProvider(String fromClause, String whereClause) throws Exception { @@ -175,16 +283,16 @@ private PagingQueryProvider getPagingQueryProvider(String fromClause, String whe } /** - * @return a {@link PagingQueryProvider} with a where clause to narrow the - * query + * @return a {@link PagingQueryProvider} with a where clause to narrow the query * @throws Exception if page provider is not created. */ - private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) throws Exception { + private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) + throws Exception { SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); factory.setDataSource(dataSource); fromClause = "%PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I" + (fromClause == null ? "" : ", " + fromClause); factory.setFromClause(getQuery(fromClause)); - if(fields == null) { + if (fields == null) { fields = FIELDS; } factory.setSelectClause(getQuery(fields)); @@ -226,15 +334,16 @@ public int countJobExecutions(BatchStatus status) { */ @Override public int countJobExecutions(String jobName, BatchStatus status) { - return getJdbcTemplate().queryForObject(getQuery(GET_COUNT_BY_JOB_NAME_AND_STATUS), Integer.class, jobName, status.name()); + return getJdbcTemplate().queryForObject(getQuery(GET_COUNT_BY_JOB_NAME_AND_STATUS), Integer.class, jobName, + status.name()); } /** * @see SearchableJobExecutionDao#getJobExecutionsWithStepCount(Date, Date, int, int) */ @Override - public List<JobExecutionWithStepCount> getJobExecutionsWithStepCount(Date fromDate, - Date toDate, int start, int count) { + public List<JobExecutionWithStepCount> getJobExecutionsWithStepCount(Date fromDate, Date toDate, int start, + int count) { if (start <= 0) { return getJdbcTemplate().query(byDateRangeWithStepCountPagingQueryProvider.generateFirstPageQuery(count), @@ -244,7 +353,8 @@ public List getJobExecutionsWithStepCount(Date fromDa Long startAfterValue = getJdbcTemplate().queryForObject( byDateRangeWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, fromDate, toDate); - return getJdbcTemplate().query(byDateRangeWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + byDateRangeWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), fromDate, toDate, startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -253,17 +363,19 @@ public List getJobExecutionsWithStepCount(Date fromDa } @Override - public List<JobExecutionWithStepCount> getJobExecutionsWithStepCountFilteredByJobInstanceId( - int jobInstanceId, int start, int count) { + public List<JobExecutionWithStepCount> getJobExecutionsWithStepCountFilteredByJobInstanceId(int jobInstanceId, + int start, int count) { if (start <= 0) { - return getJdbcTemplate().query(byJobInstanceIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), + return getJdbcTemplate().query( + byJobInstanceIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), new JobExecutionStepCountRowMapper(), jobInstanceId); } try { Long startAfterValue = getJdbcTemplate().queryForObject( byJobInstanceIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobInstanceId); - return getJdbcTemplate().query(byJobInstanceIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + byJobInstanceIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + new 
JobExecutionStepCountRowMapper(), jobInstanceId, startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -272,17 +384,19 @@ public List getJobExecutionsWithStepCountFilteredByJo } @Override - public List<JobExecutionWithStepCount> getJobExecutionsWithStepCountFilteredByTaskExecutionId( - int taskExecutionId, int start, int count) { + public List<JobExecutionWithStepCount> getJobExecutionsWithStepCountFilteredByTaskExecutionId(int taskExecutionId, + int start, int count) { if (start <= 0) { - return getJdbcTemplate().query(byTaskExecutionIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionStepCountRowMapper(), taskExecutionId); + return getJdbcTemplate().query(SchemaUtilities.getQuery( + byTaskExecutionIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), + this.getTablePrefix()), new JobExecutionStepCountRowMapper(), taskExecutionId); } try { Long startAfterValue = getJdbcTemplate().queryForObject( byTaskExecutionIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, taskExecutionId); - return getJdbcTemplate().query(byTaskExecutionIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + byTaskExecutionIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), taskExecutionId, startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -295,7 +409,7 @@ public List getJobExecutionsWithStepCountFilteredByTa */ @Override public Collection<JobExecution> getRunningJobExecutions() { - return getJdbcTemplate().query(getQuery(GET_RUNNING_EXECUTIONS), new JobExecutionRowMapper()); + return getJdbcTemplate().query(getQuery(GET_RUNNING_EXECUTIONS), new SearchableJobExecutionRowMapper()); } /** @@ -303,52 +417,53 @@ public Collection<JobExecution> getRunningJobExecutions() { */ @Override public List<JobExecution> getJobExecutions(String jobName, BatchStatus status, int start, int count) { - if (start <= 0) { - return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper(), jobName, status.name()); - } - try { - Long startAfterValue = getJdbcTemplate().queryForObject( - byJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, status.name()); - return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), jobName, status.name(), startAfterValue); - } - catch (IncorrectResultSizeDataAccessException e) { - return Collections.emptyList(); - } - } - - /** - * @see SearchableJobExecutionDao#getJobExecutions(String, int, int) - */ - @Override - public List<JobExecution> getJobExecutions(String jobName, int start, int count) { - if (start <= 0) { - return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper(), jobName); - } - try { - Long startAfterValue = getJdbcTemplate().queryForObject( - byJobNamePagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName); - return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), jobName, startAfterValue); - } - catch (IncorrectResultSizeDataAccessException e) { - return Collections.emptyList(); - } - } + if (start <= 0) { + return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateFirstPageQuery(count), + new SearchableJobExecutionRowMapper(), jobName, status.name()); + } + try { + Long startAfterValue = 
getJdbcTemplate().queryForObject( + byJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, + status.name()); + return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateRemainingPagesQuery(count), + new SearchableJobExecutionRowMapper(), jobName, status.name(), startAfterValue); + } + catch (IncorrectResultSizeDataAccessException e) { + return Collections.emptyList(); + } + } + + /** + * @see SearchableJobExecutionDao#getJobExecutions(String, int, int) + */ + @Override + public List<JobExecution> getJobExecutions(String jobName, int start, int count) { + if (start <= 0) { + return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateFirstPageQuery(count), + new SearchableJobExecutionRowMapper(), jobName); + } + try { + Long startAfterValue = getJdbcTemplate().queryForObject( + byJobNamePagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName); + return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateRemainingPagesQuery(count), + new SearchableJobExecutionRowMapper(), jobName, startAfterValue); + } + catch (IncorrectResultSizeDataAccessException e) { + return Collections.emptyList(); + } + } @Override public List<JobExecution> getJobExecutions(BatchStatus status, int start, int count) { if (start <= 0) { return getJdbcTemplate().query(byStatusPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper(), status.name()); + new SearchableJobExecutionRowMapper(), status.name()); } try { Long startAfterValue = getJdbcTemplate().queryForObject( byStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, status.name()); return getJdbcTemplate().query(byStatusPagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), status.name(), startAfterValue); + new SearchableJobExecutionRowMapper(), status.name(), startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { return Collections.emptyList(); @@ -366,7 +481,8 @@ public List getJobExecutionsWithStepCount(String jobN } try { Long startAfterValue = getJdbcTemplate().queryForObject( - byJobNameWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName); + byJobNameWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, + jobName); return getJdbcTemplate().query(byJobNameWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), jobName, startAfterValue); } @@ -382,13 +498,13 @@ public List<JobExecution> getJobExecutions(int start, int count) { if (start <= 0) { return getJdbcTemplate().query(allExecutionsPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper()); + new SearchableJobExecutionRowMapper()); } try { - Long startAfterValue = getJdbcTemplate().queryForObject( - allExecutionsPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class); + Long startAfterValue = getJdbcTemplate() + .queryForObject(allExecutionsPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class); return getJdbcTemplate().query(allExecutionsPagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), startAfterValue); + new SearchableJobExecutionRowMapper(), startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { return Collections.emptyList(); @@ -404,7 +520,8 @@ public List getJobExecutionsWithStepCount(int start, try { Long startAfterValue = getJdbcTemplate().queryForObject(
executionsWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class); - return getJdbcTemplate().query(executionsWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + executionsWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -412,6 +529,25 @@ public List getJobExecutionsWithStepCount(int start, } } + @Override + public Map<Long, Set<Long>> getJobExecutionsByTaskIds(Collection<Long> ids) { + JdbcOperations jdbcTemplate = getJdbcTemplate(); + String strIds = StringUtils.collectionToCommaDelimitedString(ids); + + String sql = getTaskQuery(GET_JOB_EXECUTIONS_BY_TASK_IDS).replace("?", strIds); + return jdbcTemplate.query(sql, + rs -> { + final Map<Long, Set<Long>> results = new HashMap<>(); + while (rs.next()) { + Long taskExecutionId = rs.getLong("TASK_EXECUTION_ID"); + Long jobExecutionId = rs.getLong("JOB_EXECUTION_ID"); + Set<Long> jobs = results.computeIfAbsent(taskExecutionId, aLong -> new HashSet<>()); + jobs.add(jobExecutionId); + } + return results; + }); + } + @Override public void saveJobExecution(JobExecution jobExecution) { throw new UnsupportedOperationException("SearchableJobExecutionDao is read only"); @@ -434,9 +570,9 @@ public void updateJobExecution(JobExecution jobExecution) { * @author Glenn Renfro * */ - protected class JobExecutionRowMapper implements RowMapper<JobExecution> { - JobExecutionRowMapper() { + protected class SearchableJobExecutionRowMapper implements RowMapper<JobExecution> { + SearchableJobExecutionRowMapper() { } @Override @@ -445,6 +581,7 @@ public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { } } + /** * Re-usable mapper for {@link JobExecutionWithStepCount} instances. * @@ -464,8 +601,66 @@ public JobExecutionWithStepCount mapRow(ResultSet rs, int rowNum) throws SQLExce }
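[Editorial note, not part of the patch: a short usage sketch for getJobExecutionsByTaskIds above; the ids are illustrative and java.util.Arrays/Collections are assumed imported.]

    Map<Long, Set<Long>> jobsByTask = dao.getJobExecutionsByTaskIds(Arrays.asList(1L, 2L));
    Set<Long> jobExecutionIds = jobsByTask.getOrDefault(1L, Collections.<Long>emptySet());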
+ protected JobParameters getJobParametersBatch5(Long executionId) { + Map<String, JobParameter> map = new HashMap<>(); + RowCallbackHandler handler = rs -> { + String parameterName = rs.getString("PARAMETER_NAME"); + + Class<?> parameterType = null; + try { + parameterType = Class.forName(rs.getString("PARAMETER_TYPE")); + } + catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + String stringValue = rs.getString("PARAMETER_VALUE"); + Object typedValue = conversionService.convert(stringValue, parameterType); + + boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + + if (typedValue instanceof String) { + map.put(parameterName, new JobParameter((String) typedValue, identifying)); + } + else if (typedValue instanceof Integer) { + map.put(parameterName, new JobParameter(((Integer) typedValue).longValue(), identifying)); + } + else if (typedValue instanceof Long) { + map.put(parameterName, new JobParameter((Long) typedValue, identifying)); + } + else if (typedValue instanceof Float) { + map.put(parameterName, new JobParameter(((Float) typedValue).doubleValue(), identifying)); + } + else if (typedValue instanceof Double) { + map.put(parameterName, new JobParameter((Double) typedValue, identifying)); + } + else if (typedValue instanceof Timestamp) { + map.put(parameterName, new JobParameter(new Date(((Timestamp) typedValue).getTime()), identifying)); + } + else if (typedValue instanceof Date) { + map.put(parameterName, new JobParameter((Date) typedValue, identifying)); + } + else { + map.put(parameterName, + new JobParameter(typedValue != null ? typedValue.toString() : "null", identifying)); + } + }; + + getJdbcTemplate().query(getQuery(FIND_PARAMS_FROM_ID_5), handler, executionId); + + return new JobParameters(map); + } + + @Override + protected JobParameters getJobParameters(Long executionId) { + if (batchVersion == BatchVersion.BATCH_4) { + return super.getJobParameters(executionId); + } + else { + return getJobParametersBatch5(executionId); + } + } - JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws SQLException{ + JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws SQLException { Long id = rs.getLong(1); JobExecution jobExecution; @@ -484,4 +679,45 @@ JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws S jobExecution.setVersion(rs.getInt(9)); return jobExecution; } + + private final class JobExecutionRowMapper implements RowMapper<JobExecution> { + + private final BatchVersion batchVersion; + + private JobInstance jobInstance; + + public JobExecutionRowMapper(BatchVersion batchVersion) { + this.batchVersion = batchVersion; + } + + public JobExecutionRowMapper(BatchVersion batchVersion, JobInstance jobInstance) { + this.batchVersion = batchVersion; + this.jobInstance = jobInstance; + } + + @Override + public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { + Long id = rs.getLong(1); + JobParameters jobParameters = getJobParameters(id); + JobExecution jobExecution; + String jobConfigurationLocation = batchVersion.equals(BatchVersion.BATCH_4) ? rs.getString(10) : null; + if (jobInstance == null) { + jobExecution = new JobExecution(id, jobParameters, jobConfigurationLocation); + } + else { + jobExecution = new JobExecution(jobInstance, id, jobParameters, jobConfigurationLocation); + } + + jobExecution.setStartTime(rs.getTimestamp(2)); + jobExecution.setEndTime(rs.getTimestamp(3)); + jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4))); + jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6))); + jobExecution.setCreateTime(rs.getTimestamp(7)); + jobExecution.setLastUpdated(rs.getTimestamp(8)); + jobExecution.setVersion(rs.getInt(9)); + return jobExecution; + } + + } + }
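[Editorial note, not part of the patch: the Batch 5 parameter table stores a declared type per row, and getJobParametersBatch5 converts the string value back to that type before wrapping it in a Batch 4 JobParameter. A minimal sketch of the conversion step, mirroring the converter registration in the constructor above:]

    ConfigurableConversionService conversionService = new DefaultConversionService();
    conversionService.addConverter(new StringToDateConverter()); // SCDF converter for date-typed parameters
    Object typedValue = conversionService.convert("42", Long.class);
    // a Long value becomes new JobParameter(42L, identifying) in the mapping above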
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java index 3cfa2157ec..be1c83f2a0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2010 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,8 @@ import java.util.Collection; import java.util.Date; +import java.util.Map; +import java.util.Set; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Job; @@ -45,7 +47,7 @@ * * @author Dave Syer * @author Glenn Renfro - * + * @author Corneil du Plessis */ public interface JobService { @@ -63,6 +65,7 @@ public interface JobService { * @throws JobInstanceAlreadyCompleteException thrown if job was already complete * @throws JobParametersInvalidException thrown if job parameters are invalid */ + @Deprecated JobExecution launch(String jobName, JobParameters params) throws NoSuchJobException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException; @@ -75,6 +78,7 @@ JobExecution launch(String jobName, JobParameters params) throws NoSuchJobExcept * * @throws NoSuchJobException thrown if job specified does not exist */ + @Deprecated JobParameters getLastJobParameters(String jobName) throws NoSuchJobException; /** @@ -90,6 +94,7 @@ JobExecution launch(String jobName, JobParameters params) throws NoSuchJobExcept * @throws JobInstanceAlreadyCompleteException thrown if job was already complete * @throws JobParametersInvalidException thrown if job parameters are invalid */ + @Deprecated JobExecution restart(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobException, JobParametersInvalidException; @@ -108,6 +113,7 @@ JobExecution restart(Long jobExecutionId) throws NoSuchJobExecutionException, Jo * @throws JobInstanceAlreadyCompleteException thrown if job was already complete * @throws JobParametersInvalidException thrown if job parameters are invalid */ + @Deprecated JobExecution restart(Long jobExecutionId, JobParameters params) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobException, JobParametersInvalidException; @@ -138,6 +144,7 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * @throws JobExecutionAlreadyRunningException thrown if the job is running (it should be * stopped first) */ + @Deprecated JobExecution abandon(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException; /** @@ -149,6 +156,7 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * @param count the maximum number of job names to return * @return a collection of job names */ + @Deprecated Collection<String> listJobs(int start, int count); /** @@ -156,6 +164,7 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * * @return the total number of jobs */ + @Deprecated int countJobs(); /** @@ -177,6 +186,8 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * @return a collection of {@link JobInstance job instances} * @throws NoSuchJobException thrown if job specified does not exist */ + + @Deprecated Collection<JobInstance> listJobInstances(String jobName, int start, int count) throws NoSuchJobException; /** @@ -187,6 +198,7 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * @return the number of job instances available * @throws NoSuchJobException thrown if job specified does not exist */ + @Deprecated int countJobInstances(String jobName) throws NoSuchJobException; /** @@ -199,6 +211,7 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * @return a collection of {@link JobExecutionWithStepCount} * 
@throws NoSuchJobException thrown if job specified does not exist */ + @Deprecated Collection<JobExecutionWithStepCount> listJobExecutionsForJobWithStepCount(String jobName, int start, int count) throws NoSuchJobException; @@ -210,6 +223,7 @@ Collection listJobExecutionsForJobWithStepCount(Strin * @return the number of executions * @throws NoSuchJobException thrown if job specified does not exist */ + @Deprecated int countJobExecutionsForJob(String jobName, BatchStatus status) throws NoSuchJobException; /** @@ -222,6 +236,7 @@ Collection listJobExecutionsForJobWithStepCount(Strin * @return all the job executions * @throws NoSuchJobException thrown if job specified does not exist */ + @Deprecated Collection<JobExecution> getJobExecutionsForJobInstance(String jobName, Long jobInstanceId) throws NoSuchJobException; @@ -233,6 +248,7 @@ Collection getJobExecutionsForJobInstance(String jobName, Long job * @param count the maximum number of executions * @return a collection of {@link JobExecution} */ + @Deprecated Collection<JobExecution> listJobExecutions(int start, int count); /** @@ -243,6 +259,7 @@ Collection getJobExecutionsForJobInstance(String jobName, Long job * @param count the maximum number of executions * @return a collection of {@link JobExecutionWithStepCount} */ + @Deprecated Collection<JobExecutionWithStepCount> listJobExecutionsWithStepCount(int start, int count); /** @@ -251,6 +268,7 @@ Collection getJobExecutionsForJobInstance(String jobName, Long job * * @return the number of job executions in the job repository */ + @Deprecated int countJobExecutions(); /** @@ -272,7 +290,8 @@ Collection getJobExecutionsForJobInstance(String jobName, Long job * @throws NoSuchJobExecutionException thrown if job execution specified does not exist */ Collection<StepExecution> getStepExecutions(Long jobExecutionId) throws NoSuchJobExecutionException; - + Collection<StepExecution> getStepExecutions(JobExecution jobExecution) throws NoSuchJobExecutionException; + void addStepExecutions(JobExecution jobExecution); /** * List the {@link StepExecution step executions} for a step in descending order of * creation (usually close to execution order). @@ -304,6 +323,7 @@ Collection listStepExecutionsForStep(String jobName, String stepN * * @return the number of executions. */ + @Deprecated int countStepExecutionsForJobExecution(long jobExecutionId); /** @@ -319,12 +339,14 @@ Collection listStepExecutionsForStep(String jobName, String stepN */ StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException; + StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) throws NoSuchStepExecutionException; /** * Send a stop signal to all running job executions. * * @return the number of executions affected */ + @Deprecated int stopAll(); /** @@ -335,6 +357,7 @@ StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws * @return {@link Collection} of step names.
* @throws NoSuchJobException thrown if the job name cannot be located */ + @Deprecated Collection<String> getStepNamesForJob(String jobName) throws NoSuchJobException; /** @@ -361,6 +384,7 @@ Collection listJobExecutionsForJob(String jobName, BatchStatus sta * @param count the maximum number of executions to return * @return a collection of {@link JobExecutionWithStepCount} */ + @Deprecated Collection<JobExecutionWithStepCount> listJobExecutionsForJobWithStepCount(Date fromDate, Date toDate, int start, int count); @@ -373,6 +397,7 @@ Collection listJobExecutionsForJobWithStepCount(Date * @param count the maximum number of executions to return * @return a collection of {@link JobExecutionWithStepCount} */ + @Deprecated Collection<JobExecutionWithStepCount> listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(int jobInstanceId, int start, int count); /** @@ -384,5 +409,13 @@ Collection listJobExecutionsForJobWithStepCount(Date * @param count the maximum number of executions to return * @return a collection of {@link JobExecutionWithStepCount} */ + @Deprecated Collection<JobExecutionWithStepCount> listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(int taskExecutionId, int start, int count); + + /** + * Returns the job execution ids associated with the given task execution ids, keyed by task execution id. + * @param taskExecutionIds the collection of task execution ids to resolve + * @return a map from each task execution id to the set of its job execution ids + */ + Map<Long, Set<Long>> getJobExecutionIdsByTaskExecutionIds(Collection<Long> taskExecutionIds); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java index 9fdf66da96..83f619e9b2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java @@ -18,6 +18,8 @@ import java.util.Collection; import java.util.Date; import java.util.List; +import java.util.Map; +import java.util.Set; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; @@ -25,7 +27,7 @@ /** * @author Dave Syer - * + * @author Corneil du Plessis */ public interface SearchableJobExecutionDao extends JobExecutionDao { @@ -99,6 +101,11 @@ public interface SearchableJobExecutionDao extends JobExecutionDao { */ List<JobExecutionWithStepCount> getJobExecutionsWithStepCount(int start, int count); + /** + * @param ids the set of task execution ids. + * @return Map with the TaskExecution id as the key and the set of job execution ids as values. + */ + Map<Long, Set<Long>> getJobExecutionsByTaskIds(Collection<Long> ids); /** * Gets count of job executions. * diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java index cf509e0fcb..0c35174e52 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2019 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
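The pair of new Map-returning methods exists to batch the task-to-job lookup: one query resolves the job execution ids for a whole page of task executions instead of issuing one query per task. A hedged usage sketch against the new JobService method; jobService is assumed to be an initialized instance and the ids are illustrative:

import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.springframework.cloud.dataflow.server.batch.JobService;

class TaskToJobExecutionMapping {
	void print(JobService jobService) {
		Collection<Long> taskExecutionIds = Arrays.asList(1L, 2L, 3L); // illustrative ids
		// single batched query rather than one lookup per task execution id
		Map<Long, Set<Long>> jobIdsByTaskId = jobService.getJobExecutionIdsByTaskExecutionIds(taskExecutionIds);
		jobIdsByTaskId.forEach((taskId, jobIds) ->
				System.out.println("task execution " + taskId + " -> job executions " + jobIds));
	}
}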
@@ -15,6 +15,7 @@ */ package org.springframework.cloud.dataflow.server.batch; +import javax.batch.operations.JobOperator; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -24,11 +25,11 @@ import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.Properties; import java.util.Set; -import javax.batch.operations.JobOperator; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,9 +47,12 @@ import org.springframework.batch.core.repository.dao.ExecutionContextDao; import org.springframework.batch.core.step.NoSuchStepException; import org.springframework.beans.factory.DisposableBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; import org.springframework.core.io.Resource; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -59,7 +63,7 @@ * @author Dave Syer * @author Michael Minella * @author Glenn Renfro - * + * @author Corneil du Plessis */ public class SimpleJobService implements JobService, DisposableBean { @@ -82,22 +86,28 @@ public class SimpleJobService implements JobService, DisposableBean { private JobOperator jsrJobOperator; + private final AggregateJobQueryDao aggregateJobQueryDao; + private int shutdownTimeout = DEFAULT_SHUTDOWN_TIMEOUT; + private final SchemaVersionTarget schemaVersionTarget; + public SimpleJobService(SearchableJobInstanceDao jobInstanceDao, SearchableJobExecutionDao jobExecutionDao, - SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository, - ExecutionContextDao executionContextDao, JobOperator jsrJobOperator) { + SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository, + ExecutionContextDao executionContextDao, JobOperator jsrJobOperator, AggregateJobQueryDao aggregateJobQueryDao, + SchemaVersionTarget schemaVersionTarget) { super(); this.jobInstanceDao = jobInstanceDao; this.jobExecutionDao = jobExecutionDao; this.stepExecutionDao = stepExecutionDao; this.jobRepository = jobRepository; this.executionContextDao = executionContextDao; + this.aggregateJobQueryDao = aggregateJobQueryDao; + this.schemaVersionTarget = schemaVersionTarget; if (jsrJobOperator == null) { logger.warn("No JobOperator compatible with JSR-352 was provided."); - } - else { + } else { this.jsrJobOperator = jsrJobOperator; } } @@ -118,11 +128,20 @@ public Collection getStepExecutions(Long jobExecutionId) throws N if (jobExecution == null) { throw new NoSuchJobExecutionException("No JobExecution with id=" + jobExecutionId); } + return getStepExecutions(jobExecution); - stepExecutionDao.addStepExecutions(jobExecution); + } + @Override + public Collection getStepExecutions(JobExecution jobExecution) { + Assert.notNull(jobExecution, "jobExecution required"); + stepExecutionDao.addStepExecutions(jobExecution); return jobExecution.getStepExecutions(); + } + @Override + public void addStepExecutions(JobExecution jobExecution) { + stepExecutionDao.addStepExecutions(jobExecution); } /** @@ -131,7 +150,6 @@ public Collection getStepExecutions(Long jobExecutionId) throws N * * @param jobExecutionId the job execution to restart * @return Instance of {@link JobExecution} associated with the restart. 
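The widened SimpleJobService constructor makes each instance schema-aware: it serves exactly one SchemaVersionTarget and delegates cross-schema queries to the new AggregateJobQueryDao. A wiring sketch under the assumption that the DAOs, repository, and operator already exist; SchemaVersionTarget.defaultTarget() is assumed here as a stand-in for the target actually being wired:

import javax.batch.operations.JobOperator;

import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.dao.ExecutionContextDao;
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
import org.springframework.cloud.dataflow.server.batch.SearchableJobExecutionDao;
import org.springframework.cloud.dataflow.server.batch.SearchableJobInstanceDao;
import org.springframework.cloud.dataflow.server.batch.SearchableStepExecutionDao;
import org.springframework.cloud.dataflow.server.batch.SimpleJobService;
import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao;

class JobServiceAssembly {
	SimpleJobService build(SearchableJobInstanceDao jobInstanceDao, SearchableJobExecutionDao jobExecutionDao,
			SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository,
			ExecutionContextDao executionContextDao, JobOperator jsrJobOperator,
			AggregateJobQueryDao aggregateJobQueryDao) {
		return new SimpleJobService(jobInstanceDao, jobExecutionDao, stepExecutionDao,
				jobRepository, executionContextDao,
				jsrJobOperator,                       // may be null; the service then only logs a warning
				aggregateJobQueryDao,                 // new collaborator for schema-qualified queries
				SchemaVersionTarget.defaultTarget()); // assumed stand-in for the target being wired
	}
}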
- * * @throws NoSuchJobException thrown if job does not exist */ @Override @@ -148,12 +166,10 @@ public JobExecution restart(Long jobExecutionId, JobParameters params) throws No if (jsrJobOperator != null) { if (params != null) { jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, params.toProperties())); - } - else { + } else { jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, new Properties())); } - } - else { + } else { throw new NoSuchJobException(String.format("Can't find job associated with job execution id %s to restart", String.valueOf(jobExecutionId))); } @@ -167,8 +183,7 @@ public JobExecution launch(String jobName, JobParameters jobParameters) throws N if (jsrJobOperator != null) { jobExecution = new JobExecution(jsrJobOperator.start(jobName, jobParameters.toProperties())); - } - else { + } else { throw new NoSuchJobException(String.format("Unable to find job %s to launch", String.valueOf(jobName))); } @@ -227,8 +242,7 @@ private Collection getJsrJobNames() { String jobXmlFileName = resource.getFilename(); jsr352JobNames.add(jobXmlFileName.substring(0, jobXmlFileName.length() - 4)); } - } - catch (IOException e) { + } catch (IOException e) { logger.debug("Unable to list JSR-352 batch jobs", e); } @@ -248,8 +262,7 @@ public int stopAll() { for (JobExecution jobExecution : result) { if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) { jsrJobOperator.stop(jobExecution.getId()); - } - else { + } else { jobExecution.stop(); jobRepository.update(jobExecution); } @@ -272,8 +285,7 @@ public JobExecution stop(Long jobExecutionId) throws NoSuchJobExecutionException if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) { jsrJobOperator.stop(jobExecutionId); jobExecution = getJobExecution(jobExecutionId); - } - else { + } else { jobExecution.stop(); jobRepository.update(jobExecution); } @@ -299,8 +311,7 @@ public JobExecution abandon(Long jobExecutionId) throws NoSuchJobExecutionExcept if (jsrJobOperator != null && jsrJobNames.contains(jobInstance.getJobName())) { jsrJobOperator.abandon(jobExecutionId); jobExecution = getJobExecution(jobExecutionId); - } - else { + } else { jobExecution.upgradeStatus(BatchStatus.ABANDONED); jobExecution.setEndTime(new Date()); jobRepository.update(jobExecution); @@ -316,19 +327,16 @@ public int countJobExecutionsForJob(String name, BatchStatus status) throws NoSu } private int countJobExecutions(String jobName, BatchStatus status) throws NoSuchJobException { - if (StringUtils.isEmpty(jobName)) { - if (status != null) { - return jobExecutionDao.countJobExecutions(status); - } - } - else { - if (status != null) { - return jobExecutionDao.countJobExecutions(jobName, status); + if (!StringUtils.hasText(jobName)) { + if (status == null) { + throw new IllegalArgumentException("One of jobName or status must be specified"); } + return jobExecutionDao.countJobExecutions(status); } - checkJobExists(jobName); - return jobExecutionDao.countJobExecutions(jobName); + return (status != null) ? 
+ jobExecutionDao.countJobExecutions(jobName, status) : + jobExecutionDao.countJobExecutions(jobName); } @Override @@ -338,18 +346,13 @@ public int countJobInstances(String name) { @Override public JobExecution getJobExecution(Long jobExecutionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = jobExecutionDao.getJobExecution(jobExecutionId); - if (jobExecution == null) { - throw new NoSuchJobExecutionException("There is no JobExecution with id=" + jobExecutionId); - } - jobExecution.setJobInstance(jobInstanceDao.getJobInstance(jobExecution)); + JobExecution jobExecution = this.aggregateJobQueryDao.getJobExecution(jobExecutionId, this.schemaVersionTarget.getName()).getJobExecution(); + jobExecution.setJobInstance(Objects.requireNonNull(this.jobInstanceDao.getJobInstance(jobExecution))); try { - jobExecution.setExecutionContext(executionContextDao.getExecutionContext(jobExecution)); - } - catch (Exception e) { - logger.info("Cannot load execution context for job execution: " + jobExecution); + jobExecution.setExecutionContext(this.executionContextDao.getExecutionContext(jobExecution)); + } catch (Exception e) { + this.logger.info("Cannot load execution context for job execution: " + jobExecution); } - stepExecutionDao.addStepExecutions(jobExecution); return jobExecution; } @@ -357,8 +360,7 @@ public JobExecution getJobExecution(Long jobExecutionId) throws NoSuchJobExecuti public Collection getJobExecutionsForJobInstance(String name, Long jobInstanceId) throws NoSuchJobException { checkJobExists(name); - List jobExecutions = jobExecutionDao.findJobExecutions(jobInstanceDao - .getJobInstance(jobInstanceId)); + List jobExecutions = jobExecutionDao.findJobExecutions(Objects.requireNonNull(jobInstanceDao.getJobInstance(jobInstanceId))); for (JobExecution jobExecution : jobExecutions) { stepExecutionDao.addStepExecutions(jobExecution); } @@ -369,15 +371,19 @@ public Collection getJobExecutionsForJobInstance(String name, Long public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchJobExecutionException, NoSuchStepExecutionException { JobExecution jobExecution = getJobExecution(jobExecutionId); + return getStepExecution(jobExecution, stepExecutionId); + } + + @Override + public StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) throws NoSuchStepExecutionException { StepExecution stepExecution = stepExecutionDao.getStepExecution(jobExecution, stepExecutionId); if (stepExecution == null) { - throw new NoSuchStepExecutionException("There is no StepExecution with jobExecutionId=" + jobExecutionId + throw new NoSuchStepExecutionException("There is no StepExecution with jobExecutionId=" + jobExecution.getId() + " and id=" + stepExecutionId); } try { stepExecution.setExecutionContext(executionContextDao.getExecutionContext(stepExecution)); - } - catch (Exception e) { + } catch (Exception e) { logger.info("Cannot load execution context for step execution: " + stepExecution); } return stepExecution; @@ -385,7 +391,7 @@ public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) @Override public Collection listJobExecutionsForJobWithStepCount(String jobName, int start, - int count) + int count) throws NoSuchJobException { checkJobExists(jobName); return jobExecutionDao.getJobExecutionsWithStepCount(jobName, start, count); @@ -412,10 +418,7 @@ public int countStepExecutionsForJobExecution(long jobExecutionId) { @Override public JobInstance getJobInstance(long jobInstanceId) throws 
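getJobExecution now resolves through the aggregate query DAO, qualified by the schema version target's name, and no longer attaches step executions as a side effect (the old stepExecutionDao.addStepExecutions call was removed). A simplified sketch of the resulting lookup shape; aggregateJobQueryDao, schemaVersionTarget, jobService, and jobExecutionId are placeholders for the wired collaborators:

// schema-qualified lookup; the aggregate DAO returns a wrapper that is unwrapped here
JobExecution execution = aggregateJobQueryDao
		.getJobExecution(jobExecutionId, schemaVersionTarget.getName())
		.getJobExecution();
// step executions are no longer loaded eagerly; opt in explicitly when they are needed
jobService.addStepExecutions(execution);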
NoSuchJobInstanceException { - JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId); - if (jobInstance == null) { - throw new NoSuchJobInstanceException("JobInstance with id=" + jobInstanceId + " does not exist"); - } + JobInstance jobInstance = this.aggregateJobQueryDao.getJobInstance(jobInstanceId, this.schemaVersionTarget.getName()); return jobInstance; } @@ -438,7 +441,7 @@ public Collection getStepNamesForJob(String jobName) throws NoSuchJobExc @Override public Collection listJobExecutionsForJob(String jobName, BatchStatus status, int pageOffset, - int pageSize) { + int pageSize) { List jobExecutions = getJobExecutions(jobName, status, pageOffset, pageSize); for (JobExecution jobExecution : jobExecutions) { @@ -450,8 +453,8 @@ public Collection listJobExecutionsForJob(String jobName, BatchSta @Override public Collection listJobExecutionsForJobWithStepCount(Date fromDate, - Date toDate, int start, int count) { - return jobExecutionDao.getJobExecutionsWithStepCount(fromDate, toDate, start, count); + Date toDate, int start, int count) { + return jobExecutionDao.getJobExecutionsWithStepCount(fromDate, toDate, start, count); } @Override @@ -466,13 +469,17 @@ public Collection listJobExecutionsForJobWithStepCoun return jobExecutionDao.getJobExecutionsWithStepCountFilteredByTaskExecutionId(taskExecutionId, start, count); } + @Override + public Map> getJobExecutionIdsByTaskExecutionIds(Collection taskExecutionIds) { + return this.jobExecutionDao.getJobExecutionsByTaskIds(taskExecutionIds); + } + private List getJobExecutions(String jobName, BatchStatus status, int pageOffset, int pageSize) { if (StringUtils.isEmpty(jobName)) { if (status != null) { return jobExecutionDao.getJobExecutions(status, pageOffset, pageSize); } - } - else { + } else { if (status != null) { return jobExecutionDao.getJobExecutions(jobName, status, pageOffset, pageSize); } @@ -502,11 +509,9 @@ public void destroy() throws Exception { if (jobExecution.isRunning()) { stop(jobExecution.getId()); } - } - catch (JobExecutionNotRunningException e) { + } catch (JobExecutionNotRunningException e) { logger.info("JobExecution is not running so it cannot be stopped"); - } - catch (Exception e) { + } catch (Exception e) { logger.error("Unexpected exception stopping JobExecution", e); if (firstException == null) { firstException = e; @@ -535,12 +540,11 @@ public void destroy() throws Exception { @Scheduled(fixedDelay = 60000) public void removeInactiveExecutions() { - for (Iterator iterator = activeExecutions.iterator(); iterator.hasNext();) { + for (Iterator iterator = activeExecutions.iterator(); iterator.hasNext(); ) { JobExecution jobExecution = iterator.next(); try { jobExecution = getJobExecution(jobExecution.getId()); - } - catch (NoSuchJobExecutionException e) { + } catch (NoSuchJobExecutionException e) { logger.error("Unexpected exception loading JobExecution", e); } if (!jobExecution.isRunning()) { @@ -549,5 +553,4 @@ public void removeInactiveExecutions() { } } - } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java index 19cf8110f3..74cbe0e97a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2019 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,7 +16,6 @@ package org.springframework.cloud.dataflow.server.batch; import java.sql.Types; - import javax.sql.DataSource; import org.slf4j.Logger; @@ -39,6 +38,14 @@ import org.springframework.batch.support.DatabaseType; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.repository.JdbcAggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.context.EnvironmentAware; +import org.springframework.core.env.Environment; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.lob.DefaultLobHandler; @@ -52,9 +59,10 @@ * ingredients as convenient as possible. * * @author Dave Syer + * @author Corneil du Plessis * */ -public class SimpleJobServiceFactoryBean implements FactoryBean, InitializingBean { +public class SimpleJobServiceFactoryBean implements FactoryBean, InitializingBean, EnvironmentAware { private static final Logger logger = LoggerFactory.getLogger(SimpleJobServiceFactoryBean.class); @@ -65,6 +73,7 @@ public class SimpleJobServiceFactoryBean implements FactoryBean, Ini private String databaseType; private String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; + private String taskTablePrefix = "TASK_"; private DataFieldMaxValueIncrementerFactory incrementerFactory; @@ -82,10 +91,26 @@ public class SimpleJobServiceFactoryBean implements FactoryBean, Ini private PlatformTransactionManager transactionManager; + private JobServiceContainer jobServiceContainer; + + private SchemaService schemaService; + + private SchemaVersionTarget schemaVersionTarget; + + private Environment environment; + public void setTransactionManager(PlatformTransactionManager transactionManager) { this.transactionManager = transactionManager; } + /** + * Set the schemaVersionTarget to be used by the created SimpleJobService. + * @param schemaVersionTarget the schemaVersionTarget to be associated with this service. + */ + public void setAppBootSchemaVersionTarget(SchemaVersionTarget schemaVersionTarget) { + this.schemaVersionTarget = schemaVersionTarget; + } + /** * A special handler for large objects. The default is usually fine, except for some * (usually older) versions of Oracle. The default is determined from the data base type. @@ -112,6 +137,11 @@ public void setMaxVarCharLength(int maxVarCharLength) { this.maxVarCharLength = maxVarCharLength; } + @Override + public void setEnvironment(Environment environment) { + this.environment = environment; + } + /** * Public setter for the {@link DataSource}. 
* @param dataSource a {@link DataSource} @@ -136,6 +166,26 @@ public void setTablePrefix(String tablePrefix) { this.tablePrefix = tablePrefix; } + public void setTaskTablePrefix(String taskTablePrefix) { + this.taskTablePrefix = taskTablePrefix; + } + + /** + * Sets the {@link JobServiceContainer} for the service. + * @param jobServiceContainer the JobServiceContainer for this service. + */ + public void setJobServiceContainer(JobServiceContainer jobServiceContainer) { + this.jobServiceContainer = jobServiceContainer; + } + + /** + * Sets the {@link SchemaService} for this factory bean. + * @param schemaService the schemaService for this factory bean. + */ + public void setSchemaService(SchemaService schemaService) { + this.schemaService = schemaService; + } + /** * A factory for incrementers (used to build primary keys for meta data). Defaults to * {@link DefaultDataFieldMaxValueIncrementerFactory}. @@ -187,7 +237,7 @@ public void afterPropertiesSet() throws Exception { jdbcTemplate = new JdbcTemplate(dataSource); if (incrementerFactory == null) { - incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); + incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); } if (databaseType == null) { @@ -219,11 +269,13 @@ protected SearchableJobInstanceDao createJobInstanceDao() throws Exception { } protected SearchableJobExecutionDao createJobExecutionDao() throws Exception { - JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao(); + BatchVersion batchVersion = BatchVersion.from(this.schemaVersionTarget); + JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao(batchVersion); dao.setDataSource(dataSource); dao.setJobExecutionIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_EXECUTION_SEQ")); dao.setTablePrefix(tablePrefix); + dao.setTaskTablePrefix(taskTablePrefix); dao.setClobTypeToUse(determineClobTypeToUse(this.databaseType)); dao.setExitMessageLength(maxVarCharLength); dao.afterPropertiesSet(); @@ -266,6 +318,10 @@ private int determineClobTypeToUse(String databaseType) { } } + protected AggregateJobQueryDao createAggregateJobQueryDao() throws Exception { + return new JdbcAggregateJobQueryDao(this.dataSource, this.schemaService, this.jobServiceContainer, this.environment); + } + /** * Create a {@link SimpleJobService} from the configuration provided. * @@ -279,7 +335,7 @@ public JobService getObject() throws Exception { transactionManager); jsrJobOperator.afterPropertiesSet(); return new SimpleJobService(createJobInstanceDao(), createJobExecutionDao(), createStepExecutionDao(), - jobRepository, createExecutionContextDao(), jsrJobOperator); + jobRepository, createExecutionContextDao(), jsrJobOperator, createAggregateJobQueryDao(), schemaVersionTarget); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java new file mode 100644 index 0000000000..3c000b5f8b --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java @@ -0,0 +1,167 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
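Taken together, the new setters describe how a per-schema-target JobService is assembled from this factory bean. An abridged wiring sketch, assuming target is the SchemaVersionTarget being configured and omitting collaborators (such as the job repository) that a complete setup would also supply:

SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean();
factoryBean.setEnvironment(environment);
factoryBean.setDataSource(dataSource);
factoryBean.setTransactionManager(transactionManager);
factoryBean.setTablePrefix(target.getBatchPrefix());    // batch tables for this schema target
factoryBean.setTaskTablePrefix(target.getTaskPrefix()); // task tables for this schema target
factoryBean.setAppBootSchemaVersionTarget(target);
factoryBean.setSchemaService(schemaService);
factoryBean.setJobServiceContainer(jobServiceContainer);
factoryBean.afterPropertiesSet();
JobService jobService = factoryBean.getObject();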
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.config; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.beans.BeanUtils; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDefinitionReader; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDeploymentReader; +import org.springframework.cloud.dataflow.server.repository.JdbcAggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.JobExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; +import org.springframework.cloud.dataflow.server.service.JobExplorerContainer; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.cloud.task.configuration.TaskProperties; +import org.springframework.cloud.task.repository.support.DatabaseType; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import 
org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Configuration for DAO Containers used for multiple schema targets. + * + * @author Corneil du Plessis + */ +@Configuration +public class AggregateDataFlowTaskConfiguration { + private static final Logger logger = LoggerFactory.getLogger(AggregateDataFlowTaskConfiguration.class); + + @Bean + public DataflowJobExecutionDaoContainer dataflowJobExecutionDao(DataSource dataSource, SchemaService schemaService) { + DataflowJobExecutionDaoContainer result = new DataflowJobExecutionDaoContainer(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowJobExecutionDao dao = new JdbcDataflowJobExecutionDao(dataSource, target.getBatchPrefix()); + result.add(target.getName(), dao); + } + return result; + } + + @Bean + public DataflowTaskExecutionDaoContainer dataflowTaskExecutionDao(DataSource dataSource, SchemaService schemaService, TaskProperties taskProperties) { + DataflowTaskExecutionDaoContainer result = new DataflowTaskExecutionDaoContainer(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + TaskProperties properties = new TaskProperties(); + BeanUtils.copyProperties(taskProperties, properties); + properties.setTablePrefix(target.getTaskPrefix()); + DataflowTaskExecutionDao dao = new JdbcDataflowTaskExecutionDao(dataSource, properties); + result.add(target.getName(), dao); + } + return result; + } + + @Bean + public DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDao(DataSource dataSource, SchemaService schemaService) { + DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + DataflowTaskExecutionMetadataDaoContainer result = new DataflowTaskExecutionMetadataDaoContainer(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao( + dataSource, + incrementerFactory.getIncrementer(databaseType, + SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix()) + ), + target.getTaskPrefix() + ); + result.add(target.getName(), dao); + } + return result; + } + + @Bean + public TaskExecutionDaoContainer taskExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + return new TaskExecutionDaoContainer(dataSource, schemaService); + } + + @Bean + public JobRepositoryContainer jobRepositoryContainer(DataSource dataSource, PlatformTransactionManager platformTransactionManager, SchemaService schemaService) { + return new JobRepositoryContainer(dataSource, platformTransactionManager, schemaService); + } + + @Bean + public JobExplorerContainer jobExplorerContainer(DataSource dataSource, SchemaService schemaService) { + return new JobExplorerContainer(dataSource, schemaService); + } + + @Bean + public JobServiceContainer jobServiceContainer(DataSource dataSource, PlatformTransactionManager platformTransactionManager, + SchemaService schemaService, JobRepositoryContainer jobRepositoryContainer, + JobExplorerContainer jobExplorerContainer, Environment environment) { + return new JobServiceContainer(dataSource, platformTransactionManager, schemaService, jobRepositoryContainer, + jobExplorerContainer, environment); + } + + @Bean +
public JobExecutionDaoContainer jobExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + return new JobExecutionDaoContainer(dataSource, schemaService); + } + + @Bean + @ConditionalOnMissingBean + public TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository repository) { + return new DefaultTaskDefinitionReader(repository); + } + + @Bean + @ConditionalOnMissingBean + public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) { + return new DefaultTaskDeploymentReader(repository); + } + + @Bean + public AggregateJobQueryDao aggregateJobQueryDao(DataSource dataSource, SchemaService schemaService, + JobServiceContainer jobServiceContainer, Environment environment) throws Exception { + return new JdbcAggregateJobQueryDao(dataSource, schemaService, jobServiceContainer, environment); + } + + @Bean + public TaskBatchDaoContainer taskBatchDaoContainer(DataSource dataSource, SchemaService schemaService) { + return new TaskBatchDaoContainer(dataSource, schemaService); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java index 304bfaba71..57284eadca 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
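Each *Container bean above registers one DAO or service per schema target name, so request-handling code (the reworked controllers later in this patch, for example) can pick the variant whose table prefixes match the launched app's Boot version. A hedged consumer sketch, assuming the containers expose a get(schemaTarget) accessor mirroring the add(name, dao) registration shown above, and that SchemaVersionTarget.defaultTarget() yields the default target:

// e.g. inside a controller or service handling a request for a given schema target
String schemaTarget = SchemaVersionTarget.defaultTarget().getName();
JobService jobService = jobServiceContainer.get(schemaTarget); // assumed accessor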
@@ -27,6 +27,8 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.ObjectProvider; +import org.springframework.boot.actuate.info.BuildInfoContributor; +import org.springframework.boot.actuate.info.GitInfoContributor; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -38,6 +40,9 @@ import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; import org.springframework.cloud.common.security.support.SecurityStateBean; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService; @@ -53,9 +58,9 @@ import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.StreamDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.cloud.dataflow.server.DockerValidatorProperties; import org.springframework.cloud.dataflow.server.TaskValidationController; -import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.config.features.ConditionalOnStreamsEnabled; import org.springframework.cloud.dataflow.server.config.features.ConditionalOnTasksEnabled; @@ -74,6 +79,7 @@ import org.springframework.cloud.dataflow.server.controller.RuntimeAppInstanceController; import org.springframework.cloud.dataflow.server.controller.RuntimeAppsController; import org.springframework.cloud.dataflow.server.controller.RuntimeStreamsController; +import org.springframework.cloud.dataflow.server.controller.SchemaController; import org.springframework.cloud.dataflow.server.controller.StreamDefinitionController; import org.springframework.cloud.dataflow.server.controller.StreamDeploymentController; import org.springframework.cloud.dataflow.server.controller.StreamLogsController; @@ -81,6 +87,7 @@ import org.springframework.cloud.dataflow.server.controller.TaskCtrController; import org.springframework.cloud.dataflow.server.controller.TaskDefinitionController; import org.springframework.cloud.dataflow.server.controller.TaskExecutionController; +import org.springframework.cloud.dataflow.server.controller.TaskExecutionThinController; import org.springframework.cloud.dataflow.server.controller.TaskLogsController; import org.springframework.cloud.dataflow.server.controller.TaskPlatformController; import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController; @@ -97,6 +104,7 @@ import org.springframework.cloud.dataflow.server.job.LauncherRepository; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; import 
org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; import org.springframework.cloud.dataflow.server.service.LauncherService; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.SpringSecurityAuditorAware; @@ -124,10 +132,10 @@ import org.springframework.cloud.skipper.client.SkipperClientProperties; import org.springframework.cloud.skipper.client.SkipperClientResponseErrorHandler; import org.springframework.cloud.skipper.client.util.HttpClientConfigurer; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; +import org.springframework.core.env.PropertyResolver; import org.springframework.data.jpa.repository.config.EnableJpaAuditing; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.hateoas.mediatype.MessageResolver; @@ -151,13 +159,14 @@ * @author Andy Clement * @author Glenn Renfro * @author Christian Tzolov + * @author Corneil du Plessis */ @SuppressWarnings("all") @Configuration @Import(CompletionConfiguration.class) -@ConditionalOnBean({ EnableDataFlowServerConfiguration.Marker.class }) -@EnableConfigurationProperties({ FeaturesProperties.class, VersionInfoProperties.class, - DockerValidatorProperties.class, DataflowMetricsProperties.class }) +@ConditionalOnBean({EnableDataFlowServerConfiguration.Marker.class}) +@EnableConfigurationProperties({FeaturesProperties.class, VersionInfoProperties.class, + DockerValidatorProperties.class, DataflowMetricsProperties.class}) @ConditionalOnProperty(prefix = "dataflow.server", name = "enabled", havingValue = "true", matchIfMissing = true) @EntityScan({ "org.springframework.cloud.dataflow.core" @@ -180,7 +189,7 @@ public RootController rootController(EntityLinks entityLinks) { @Bean public CompletionController completionController(StreamCompletionProvider completionProvider, - TaskCompletionProvider taskCompletionProvider) { + TaskCompletionProvider taskCompletionProvider) { return new CompletionController(completionProvider, taskCompletionProvider); } @@ -191,13 +200,16 @@ public ToolsController toolsController() { @Bean public AboutController aboutController(ObjectProvider streamDeployer, - ObjectProvider launcherRepository, - FeaturesProperties featuresProperties, - VersionInfoProperties versionInfoProperties, - SecurityStateBean securityStateBean, - DataflowMetricsProperties monitoringDashboardInfoProperties) { + ObjectProvider launcherRepository, + FeaturesProperties featuresProperties, + VersionInfoProperties versionInfoProperties, + SecurityStateBean securityStateBean, + DataflowMetricsProperties monitoringDashboardInfoProperties, + ObjectProvider gitInfoContributor, + ObjectProvider buildInfoContributor) { return new AboutController(streamDeployer.getIfAvailable(), launcherRepository.getIfAvailable(), - featuresProperties, versionInfoProperties, securityStateBean, monitoringDashboardInfoProperties); + featuresProperties, versionInfoProperties, securityStateBean, monitoringDashboardInfoProperties, + gitInfoContributor, buildInfoContributor); } @Bean @@ -210,6 +222,8 @@ public RestControllerAdvice restControllerAdvice() { return new RestControllerAdvice(); } + + @Configuration public static class AppRegistryConfiguration { @@ -223,13 
+237,13 @@ public ForkJoinPoolFactoryBean appRegistryFJPFB() { @Bean public AppResourceCommon appResourceCommon(@Nullable MavenProperties mavenProperties, - DelegatingResourceLoader delegatingResourceLoader) { + DelegatingResourceLoader delegatingResourceLoader) { return new AppResourceCommon(mavenProperties, delegatingResourceLoader); } @Bean public AppRegistryService appRegistryService(AppRegistrationRepository appRegistrationRepository, - AppResourceCommon appResourceCommon, AuditRecordService auditRecordService) { + AppResourceCommon appResourceCommon, AuditRecordService auditRecordService) { return new DefaultAppRegistryService(appRegistrationRepository, appResourceCommon, auditRecordService); } @@ -259,13 +273,35 @@ public AppRegistrationAssemblerProvider appRegistryAssemblerProvider() { public static class TaskEnabledConfiguration { @Bean - public TaskExecutionController taskExecutionController(TaskExplorer explorer, - TaskExecutionService taskExecutionService, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionInfoService taskExecutionInfoService, - TaskDeleteService taskDeleteService, TaskJobService taskJobService) { - return new TaskExecutionController(explorer, taskExecutionService, taskDefinitionRepository, + public SchemaController schemaController(SchemaService schemaService) { + return new SchemaController(schemaService); + } + + @Bean + public TaskExecutionController taskExecutionController( + AggregateTaskExplorer explorer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskExecutionService taskExecutionService, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeleteService taskDeleteService, + TaskJobService taskJobService + ) { + return new TaskExecutionController(explorer, + aggregateExecutionSupport, + taskExecutionService, + taskDefinitionRepository, + taskDefinitionReader, taskExecutionInfoService, - taskDeleteService, taskJobService); + taskDeleteService, + taskJobService + ); + } + + @Bean + public TaskExecutionThinController taskExecutionThinController(AggregateTaskExplorer aggregateTaskExplorer, TaskDefinitionRepository taskDefinitionRepository, TaskJobService taskJobService) { + return new TaskExecutionThinController(aggregateTaskExplorer, taskDefinitionRepository, taskJobService); } @Bean @@ -276,15 +312,22 @@ public TaskPlatformController taskLauncherController(LauncherService launcherSer @Bean @ConditionalOnMissingBean public TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider( - TaskExecutionService taskExecutionService, TaskJobService taskJobService, TaskExplorer taskExplorer) { - return new DefaultTaskDefinitionAssemblerProvider(taskExecutionService, taskJobService, taskExplorer); + TaskExecutionService taskExecutionService, + TaskJobService taskJobService, + AggregateTaskExplorer taskExplorer, + AggregateExecutionSupport aggregateExecutionSupport + ) { + return new DefaultTaskDefinitionAssemblerProvider(taskExecutionService, taskJobService, taskExplorer, aggregateExecutionSupport); } @Bean - public TaskDefinitionController taskDefinitionController(TaskExplorer taskExplorer, - TaskDefinitionRepository repository, TaskSaveService taskSaveService, - TaskDeleteService taskDeleteService, - TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { + public TaskDefinitionController taskDefinitionController( + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository repository, + TaskSaveService 
taskSaveService, + TaskDeleteService taskDeleteService, + TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider + ) { return new TaskDefinitionController(taskExplorer, repository, taskSaveService, taskDeleteService, taskDefinitionAssemblerProvider); } @@ -305,13 +348,13 @@ public JobExecutionThinController jobExecutionThinController(TaskJobService repo } @Bean - public JobStepExecutionController jobStepExecutionController(JobService service) { - return new JobStepExecutionController(service); + public JobStepExecutionController jobStepExecutionController(JobServiceContainer jobServiceContainer) { + return new JobStepExecutionController(jobServiceContainer); } @Bean - public JobStepExecutionProgressController jobStepExecutionProgressController(JobService service) { - return new JobStepExecutionProgressController(service); + public JobStepExecutionProgressController jobStepExecutionProgressController(JobServiceContainer jobServiceContainer, TaskJobService taskJobService) { + return new JobStepExecutionProgressController(jobServiceContainer, taskJobService); } @Bean @@ -321,9 +364,9 @@ public JobInstanceController jobInstanceController(TaskJobService repository) { @Bean public TaskValidationService taskValidationService(AppRegistryService appRegistry, - DockerValidatorProperties dockerValidatorProperties, - TaskDefinitionRepository taskDefinitionRepository, - TaskConfigurationProperties taskConfigurationProperties) { + DockerValidatorProperties dockerValidatorProperties, + TaskDefinitionRepository taskDefinitionRepository, + TaskConfigurationProperties taskConfigurationProperties) { return new DefaultTaskValidationService(appRegistry, dockerValidatorProperties, taskDefinitionRepository); @@ -346,12 +389,13 @@ public LauncherService launcherService(LauncherRepository launcherRepository) { @Bean public TaskCtrController tasksCtrController(ApplicationConfigurationMetadataResolver metadataResolver, - TaskConfigurationProperties taskConfigurationProperties, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, - AppResourceCommon appResourceCommon) { + TaskConfigurationProperties taskConfigurationProperties, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, + AppResourceCommon appResourceCommon) { return new TaskCtrController(metadataResolver, taskConfigurationProperties, composedTaskRunnerConfigurationProperties, appResourceCommon); } + } @Configuration @@ -361,9 +405,9 @@ public static class StreamEnabledConfiguration { @Bean public StreamValidationService streamValidationService(AppRegistryService appRegistry, - DockerValidatorProperties dockerValidatorProperties, - StreamDefinitionRepository streamDefinitionRepository, - StreamDefinitionService streamDefinitionService) { + DockerValidatorProperties dockerValidatorProperties, + StreamDefinitionRepository streamDefinitionRepository, + StreamDefinitionService streamDefinitionService) { return new DefaultStreamValidationService(appRegistry, dockerValidatorProperties, streamDefinitionRepository, @@ -386,9 +430,9 @@ public StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider( @Bean @ConditionalOnMissingBean public StreamDefinitionController streamDefinitionController(StreamService streamService, - StreamDefinitionService streamDefinitionService, AppRegistryService appRegistryService, - StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, - AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { + StreamDefinitionService 
streamDefinitionService, AppRegistryService appRegistryService, + StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, + AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { return new StreamDefinitionController(streamService, streamDefinitionService, appRegistryService, streamDefinitionAssemblerProvider, appRegistrationAssemblerProvider); } @@ -430,8 +474,8 @@ public StreamDeploymentController updatableStreamDeploymentController( @Bean public SkipperClient skipperClient(SkipperClientProperties properties, - RestTemplateBuilder restTemplateBuilder, ObjectMapper objectMapper, - @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService) { + RestTemplateBuilder restTemplateBuilder, ObjectMapper objectMapper, + @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService) { // TODO (Tzolov) review the manual Hal convertion configuration objectMapper.registerModule(new Jackson2HalModule()); @@ -459,11 +503,11 @@ public SkipperClient skipperClient(SkipperClientProperties properties, @Bean public SkipperStreamDeployer skipperStreamDeployer(SkipperClient skipperClient, - StreamDefinitionRepository streamDefinitionRepository, - SkipperClientProperties skipperClientProperties, - AppRegistryService appRegistryService, - ForkJoinPool runtimeAppsStatusFJPFB, - StreamDefinitionService streamDefinitionService) { + StreamDefinitionRepository streamDefinitionRepository, + SkipperClientProperties skipperClientProperties, + AppRegistryService appRegistryService, + ForkJoinPool runtimeAppsStatusFJPFB, + StreamDefinitionService streamDefinitionService) { logger.info("Skipper URI [" + skipperClientProperties.getServerUri() + "]"); return new SkipperStreamDeployer(skipperClient, streamDefinitionRepository, appRegistryService, runtimeAppsStatusFJPFB, streamDefinitionService); @@ -471,11 +515,12 @@ public SkipperStreamDeployer skipperStreamDeployer(SkipperClient skipperClient, @Bean public AppDeploymentRequestCreator streamDeploymentPropertiesUtils(AppRegistryService appRegistry, - CommonApplicationProperties commonApplicationProperties, - ApplicationConfigurationMetadataResolver applicationConfigurationMetadataResolver, - StreamDefinitionService streamDefinitionService) { + CommonApplicationProperties commonApplicationProperties, + ApplicationConfigurationMetadataResolver applicationConfigurationMetadataResolver, + StreamDefinitionService streamDefinitionService, + PropertyResolver propertyResolver) { return new AppDeploymentRequestCreator(appRegistry, commonApplicationProperties, - applicationConfigurationMetadataResolver, streamDefinitionService); + applicationConfigurationMetadataResolver, streamDefinitionService, propertyResolver); } @Bean @@ -499,7 +544,7 @@ public TaskSchedulerController taskSchedulerController(SchedulerService schedule public static class AuditingConfiguration { @Bean public AuditRecordService auditRecordService(AuditRecordRepository auditRecordRepository, - ObjectMapper objectMapper) { + ObjectMapper objectMapper) { return new DefaultAuditRecordService(auditRecordRepository); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java index 4d26486147..96bdbbfe6b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,36 +20,31 @@ import javax.servlet.Filter; import javax.sql.DataSource; -import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; import org.springframework.beans.factory.ObjectProvider; import org.springframework.boot.autoconfigure.batch.BatchProperties; import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskConfiguration; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultTaskRepositoryContainer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.completion.CompletionConfiguration; import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepositoryCustom; import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepositoryImpl; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.config.features.FeaturesConfiguration; import org.springframework.cloud.dataflow.server.config.web.WebConfiguration; import org.springframework.cloud.dataflow.server.db.migration.DataFlowFlywayConfigurationCustomizer; -import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; import org.springframework.cloud.dataflow.server.support.AuthenticationSuccessEventListener; import org.springframework.cloud.task.configuration.TaskProperties; -import org.springframework.cloud.task.repository.support.DatabaseType; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; import org.springframework.data.web.config.EnableSpringDataWebSupport; -import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.transaction.PlatformTransactionManager; import 
org.springframework.web.filter.ForwardedHeaderFilter; @@ -67,11 +62,22 @@ * @author Josh Long * @author Michael Minella * @author Gunnar Hillert + * @author Michael Wirth + * @author Corneil du Plessis */ @EnableSpringDataWebSupport @Configuration -@Import({ CompletionConfiguration.class, FeaturesConfiguration.class, WebConfiguration.class }) +@Import({ + CompletionConfiguration.class, + FeaturesConfiguration.class, + WebConfiguration.class, + H2ServerConfiguration.class, + SchemaServiceConfiguration.class, + AggregateTaskConfiguration.class, + AggregateDataFlowTaskConfiguration.class +}) @EnableConfigurationProperties({ BatchProperties.class, CommonApplicationProperties.class }) +@ComponentScan(basePackages = {"org.springframework.cloud.dataflow.schema.service", "org.springframework.cloud.dataflow.aggregate.task"}) public class DataFlowServerConfiguration { @Bean @@ -93,34 +99,12 @@ public PlatformTransactionManager transactionManager( return transactionManager; } - @Bean - DataflowJobExecutionDao dataflowJobExecutionDao(DataSource dataSource) { - return new JdbcDataflowJobExecutionDao(dataSource, AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); - } @Bean public TaskProperties taskProperties() { return new TaskProperties(); } - @Bean - DataflowTaskExecutionDao dataflowTaskExecutionDao(DataSource dataSource, TaskProperties taskProperties) { - return new JdbcDataflowTaskExecutionDao(dataSource, taskProperties); - } - - @Bean - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao(DataSource dataSource) { - DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - String databaseType; - try { - databaseType = DatabaseType.fromMetaData(dataSource).name(); - } - catch (MetaDataAccessException e) { - throw new IllegalStateException(e); - } - return new JdbcDataflowTaskExecutionMetadataDao(dataSource, incrementerFactory.getIncrementer(databaseType, - "task_execution_metadata_seq")); - } @Bean public AuthenticationSuccessEventListener authenticationSuccessEventListener( diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowAsyncAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowAsyncAutoConfiguration.java new file mode 100644 index 0000000000..149de5c003 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowAsyncAutoConfiguration.java @@ -0,0 +1,76 @@ +/* + * Copyright 2016-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.config; + +import java.util.concurrent.Executor; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler; +import org.springframework.boot.autoconfigure.AutoConfigureAfter; +import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.autoconfigure.task.TaskExecutionAutoConfiguration; +import org.springframework.boot.task.TaskExecutorBuilder; +import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.annotation.AsyncConfigurer; +import org.springframework.scheduling.annotation.EnableAsync; + +import static org.springframework.cloud.dataflow.server.config.DataflowAsyncAutoConfiguration.ASYNC_PROPS_PREFIX; + +/** + * Enables async executions for the Spring Cloud Dataflow server. + * Uses the Spring Boot autoconfigured {@code TaskExecutorBuilder} to create an async executor and register it + * with name {@link #DATAFLOW_ASYNC_EXECUTOR}. + * + * @author Tobias Soloschenko + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnBean({EnableDataFlowServerConfiguration.Marker.class}) +@ConditionalOnProperty(prefix = ASYNC_PROPS_PREFIX, name = "enabled", havingValue = "true") +@AutoConfigureAfter(TaskExecutionAutoConfiguration.class) +@EnableAsync +public class DataflowAsyncAutoConfiguration implements AsyncConfigurer { + + private static final Logger logger = LoggerFactory.getLogger(DataflowAsyncAutoConfiguration.class); + + public static final String ASYNC_PROPS_PREFIX = DataFlowPropertyKeys.PREFIX + "async"; + + public static final String DATAFLOW_ASYNC_EXECUTOR = "dataflowAsyncExecutor"; + + private static final String THREAD_NAME_PREFIX = "scdf-async-"; + + private final TaskExecutorBuilder taskExecutorBuilder; + + public DataflowAsyncAutoConfiguration(TaskExecutorBuilder taskExecutorBuilder) { + this.taskExecutorBuilder = taskExecutorBuilder; + } + + @Bean(name = DATAFLOW_ASYNC_EXECUTOR) + @Override + public Executor getAsyncExecutor() { + return this.taskExecutorBuilder.threadNamePrefix(THREAD_NAME_PREFIX).build(); + } + + @Override + public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() { + return (throwable, method, objects) -> logger.error("Exception thrown in @Async Method " + method.getName(), + throwable); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java index cf54311073..8ecd12990b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.springframework.cloud.dataflow.server.config; import org.springframework.cloud.common.security.OAuthSecurityConfiguration; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/H2ServerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/H2ServerConfiguration.java new file mode 100644 index 0000000000..a099e95198 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/H2ServerConfiguration.java @@ -0,0 +1,91 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.config; + +import java.sql.SQLException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.h2.tools.Server; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.config.BeanFactoryPostProcessor; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Autostart an embedded H2 database server. 
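+ * <p>
+ * For example, a datasource URL such as {@code jdbc:h2:tcp://localhost:19092/mem:dataflow}
+ * (port and database name are illustrative) satisfies the conditions declared below; the port
+ * (19092 here) is extracted from the URL and used to start the TCP server.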
+ * + * @author Michael Wirth + * @author Corneil du Plessis + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnClass(Server.class) +@ConditionalOnProperty(name = "spring.dataflow.embedded.database.enabled", havingValue = "true", matchIfMissing = true) +@ConditionalOnExpression("'${spring.datasource.url:#{null}}'.startsWith('jdbc:h2:tcp://localhost')") +public class H2ServerConfiguration { + + private static final Logger logger = LoggerFactory.getLogger(H2ServerConfiguration.class); + + private static final Pattern JDBC_URL_PATTERN = Pattern.compile("^jdbc:h2:tcp://localhost:(?<port>\\d+)"); + + @Bean + public H2ServerBeanFactoryPostProcessor h2ServerBeanFactoryPostProcessor() { + return new H2ServerBeanFactoryPostProcessor(); + } + + @Bean(destroyMethod = "stop") + public Server h2TcpServer(@Value("${spring.datasource.url}") String dataSourceUrl) { + logger.info("Starting H2 Server with URL: " + dataSourceUrl); + + Matcher matcher = JDBC_URL_PATTERN.matcher(dataSourceUrl); + if (!matcher.find()) { + throw new IllegalArgumentException( + "DataSource URL '" + dataSourceUrl + "' does not match regex pattern: " + + JDBC_URL_PATTERN.pattern()); + } + + String port = matcher.group("port"); + try { + return Server.createTcpServer("-ifNotExists", "-tcp", + "-tcpAllowOthers", "-tcpPort", port).start(); + } + catch (SQLException e) { + throw new IllegalStateException(e); + } + + } + + /** + * A {@link BeanFactoryPostProcessor} whose sole job is to ensure that the H2 server is up and running before any + * datasource initialization is attempted. It does this by requesting the H2Server bean which in turn starts up + * the server. + */ + static class H2ServerBeanFactoryPostProcessor implements BeanFactoryPostProcessor { + + @Override + public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { + beanFactory.getBean("h2TcpServer"); + } + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java index 629c5cd2e4..cdfcd7e830 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java @@ -18,8 +18,11 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.Optional; import java.util.Properties; import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import io.micrometer.prometheus.rsocket.autoconfigure.PrometheusRSocketClientProperties; import org.apache.commons.lang3.exception.ExceptionUtils; @@ -37,6 +40,7 @@ import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.core.Ordered; import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.core.env.Environment; import org.springframework.core.env.PropertiesPropertySource; import org.springframework.util.StringUtils; @@ -44,22 +48,26 @@ * This post-processor helps to replicate the metrics property defined for the DataFlow server to the * spring.cloud.dataflow.applicationProperties.stream.* and spring.cloud.dataflow.applicationProperties.task.* as
well. * This allows reusing the same metrics configuration for all deployed stream applications and launched tasks. - * + * <p>
* The post-processor also automatically computes some of the Monitoring Dashboard properties from the server's * metrics properties. - * + * <p>
* Only the properties not explicitly set are updated. That means that you can explicitly set any monitoring dashboard or stream/task metrics and your settings will be honored. * * @author Christian Tzolov + * @author Chris Bono */ public class MetricsReplicationEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered { private static final Logger logger = LoggerFactory.getLogger(MetricsReplicationEnvironmentPostProcessor.class); private static final String PROPERTY_SOURCE_KEY_NAME = MetricsReplicationEnvironmentPostProcessor.class.getName(); - public static final String MONITORING_PREFIX = retrievePropertyPrefix(DataflowMetricsProperties.class); - public static final String MONITORING_DASHBOARD_PREFIX = MONITORING_PREFIX + ".dashboard"; - public static final String COMMON_APPLICATION_PREFIX = retrievePropertyPrefix(CommonApplicationProperties.class); + private static final String MONITORING_PREFIX = retrievePropertyPrefix(DataflowMetricsProperties.class); + private static final String MONITORING_DASHBOARD_PREFIX = MONITORING_PREFIX + ".dashboard"; + private static final String COMMON_APPLICATION_PREFIX = retrievePropertyPrefix(CommonApplicationProperties.class); + private static final String COMMON_STREAM_PROPS_PREFIX = COMMON_APPLICATION_PREFIX + ".stream."; + private static final String COMMON_TASK_PROPS_PREFIX = COMMON_APPLICATION_PREFIX + ".task."; + private static final Pattern METRIC_PROP_NAME_PATTERN = Pattern.compile("(management\\.)(metrics\\.export\\.)(\\w+\\.)(.+)"); @Override public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { @@ -76,34 +84,23 @@ public void postProcessEnvironment(ConfigurableEnvironment environment, SpringAp // 2. Replicates the server's metrics properties to the applicationProperties.stream // and applicationProperties.task. if (environment.getProperty(MONITORING_PREFIX + ".property-replication", Boolean.class, true)) { - // Callback function that checks if the input property is set the server's configuration. If it is then - // the property is replicated as a common Stream and Task property. - Consumer<String> propertyReplicator = metricsPropertyName -> { - if (environment.containsProperty(metricsPropertyName)) { + // Callback function to handle property replication + Consumer<String> propertyReplicator = metricsPropName -> { + if (environment.containsProperty(metricsPropName)) { try { - String serverPropertyValue = environment.getProperty(metricsPropertyName); - // Overrides only the Stream applicationProperties that have not been set explicitly. - String commonStreamPropertyName = COMMON_APPLICATION_PREFIX + ".stream." + metricsPropertyName; - if (!environment.containsProperty(commonStreamPropertyName)) { - logger.info("Replicate metrics property:" + commonStreamPropertyName + "=" + serverPropertyValue); - // if a property with same key occurs multiple times only the first is set. - additionalProperties.putIfAbsent(commonStreamPropertyName, serverPropertyValue); - } - // Overrides only the Task applicationProperties that have not been set explicitly. - String commonTaskPropertyName = COMMON_APPLICATION_PREFIX + ".task." + metricsPropertyName; - if (!environment.containsProperty(commonTaskPropertyName)) { - logger.info("Replicate metrics property:" + commonTaskPropertyName + "=" + serverPropertyValue); - // if a property with same key occurs multiple times only the first is set.
- additionalProperties.putIfAbsent(commonTaskPropertyName, serverPropertyValue); - } + String serverPropValue = environment.getProperty(metricsPropName); + ensurePropIsReplicatedExactlyOnceToCommonStreamsAndTasksProps(metricsPropName, serverPropValue, + environment, additionalProperties); + metricsPropertyNameInBoot3(metricsPropName).ifPresent((metricsPropNameBoot3) -> + ensurePropIsReplicatedExactlyOnceToCommonStreamsAndTasksProps(metricsPropNameBoot3, + serverPropValue, environment, additionalProperties)); } catch (Throwable throwable) { - logger.error("Failed with replicating {}, because of {}", metricsPropertyName, + logger.error("Failed with replicating {}, because of {}", metricsPropName, ExceptionUtils.getRootCauseMessage(throwable)); } } }; - this.replicateServerMetricsPropertiesToStreamAndTask(environment, WavefrontProperties.class, propertyReplicator); this.replicateServerMetricsPropertiesToStreamAndTask(environment, InfluxProperties.class, propertyReplicator); this.replicateServerMetricsPropertiesToStreamAndTask(environment, PrometheusProperties.class, propertyReplicator); @@ -121,6 +118,44 @@ public void postProcessEnvironment(ConfigurableEnvironment environment, SpringAp } } + private void ensurePropIsReplicatedExactlyOnceToCommonStreamsAndTasksProps(String metricsPropName, Object serverPropValue, + Environment environment, Properties additionalProperties) { + ensurePropIsReplicatedExactlyOnceToCommonProps(metricsPropName, serverPropValue, COMMON_STREAM_PROPS_PREFIX, + environment, additionalProperties); + ensurePropIsReplicatedExactlyOnceToCommonProps(metricsPropName, serverPropValue, COMMON_TASK_PROPS_PREFIX, + environment, additionalProperties); + } + + private void ensurePropIsReplicatedExactlyOnceToCommonProps(String metricsPropName, + Object serverPropValue, String commonPropsPrefix, Environment environment, Properties additionalProperties) { + // Only add if not already added explicitly + String commonStreamPropName = commonPropsPrefix + metricsPropName; + if (!environment.containsProperty(commonStreamPropName)) { + logger.info("Replicate metrics property:{}={}", commonStreamPropName, serverPropValue); + // Only add it once + additionalProperties.putIfAbsent(commonStreamPropName, serverPropValue); + } + } + + private Optional<String> metricsPropertyNameInBoot3(String metricsPropertyName) { + // Handle the Spring Boot 3 form of the metrics property + // + // Boot 2.x: 'management.metrics.export.<registry>.<suffix>' + // Boot 3.x: 'management.<registry>.metrics.export.<suffix>' + // + // Regex breaks the original into 4 groups: + // 1 2 3 4 + // (management.)(metrics.export.)(<registry>.)(<suffix>) + // + // We simply swap groups 2 and 3 to get Boot3 version of the property + // + Matcher matcher = METRIC_PROP_NAME_PATTERN.matcher(metricsPropertyName); + if (matcher.matches()) { + return Optional.of(matcher.group(1) + matcher.group(3) + matcher.group(2) + matcher.group(4)); + } + return Optional.empty(); + } + /** * Checks if the management.metrics.export.<registry>.enabled property is set to true for the provided * meterRegistryPropertyClass.
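To make the group swap above concrete, here is a minimal, self-contained sketch of the same rename (the class name MetricsPropNameBoot3Demo and the sample Prometheus property are illustrative, not taken from the patch):

    import java.util.Optional;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Swaps the 'metrics.export.' segment and the registry-name segment, as the post-processor does.
    class MetricsPropNameBoot3Demo {
        private static final Pattern METRIC_PROP_NAME_PATTERN =
                Pattern.compile("(management\\.)(metrics\\.export\\.)(\\w+\\.)(.+)");

        static Optional<String> toBoot3(String boot2Name) {
            Matcher matcher = METRIC_PROP_NAME_PATTERN.matcher(boot2Name);
            return matcher.matches()
                    ? Optional.of(matcher.group(1) + matcher.group(3) + matcher.group(2) + matcher.group(4))
                    : Optional.empty();
        }

        public static void main(String[] args) {
            // Prints: management.prometheus.metrics.export.enabled
            System.out.println(toBoot3("management.metrics.export.prometheus.enabled").orElse("no match"));
        }
    }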
@@ -133,7 +168,7 @@ public void postProcessEnvironment(ConfigurableEnvironment environment, SpringAp */ private boolean isMetricsRegistryEnabled(Class<?> meterRegistryPropertyClass, ConfigurableEnvironment environment) { String metricsPrefix = retrievePropertyPrefix(meterRegistryPropertyClass); - return !StringUtils.isEmpty(metricsPrefix) && + return StringUtils.hasText(metricsPrefix) && environment.getProperty(metricsPrefix + ".enabled", Boolean.class, false); } @@ -146,7 +181,7 @@ private boolean isMetricsRegistryEnabled(Class<?> meterRegistryPropertyClass, Co private static String retrievePropertyPrefix(Class<?> metricsPropertyClass) { if (metricsPropertyClass.isAnnotationPresent(ConfigurationProperties.class)) { ConfigurationProperties cp = metricsPropertyClass.getAnnotation(ConfigurationProperties.class); - return StringUtils.isEmpty(cp.prefix()) ? cp.value() : cp.prefix(); + return StringUtils.hasText(cp.prefix()) ? cp.prefix() : cp.value(); } return null; } @@ -212,7 +247,7 @@ private void replicateServerMetricsPropertiesToStreamAndTask(ConfigurableEnviron * Converts the class fields into metrics property candidates and hands them to the replication handler * to process. The metrics prefix is retrieved from the {@link ConfigurationProperties} annotation. * Drops the non-annotated classes. - * + * <p>
* The traversePropertyClassFields iterates and repeats the computation over the class's parent * classes when available. * @@ -221,7 +256,7 @@ private void replicateServerMetricsPropertiesToStreamAndTask(ConfigurableEnviron */ private void traversePropertyClassFields(Class<?> metricsPropertyClass, Consumer<String> metricsReplicationHandler) { String metricsPrefix = retrievePropertyPrefix(metricsPropertyClass); - if (!StringUtils.isEmpty(metricsPrefix)) { + if (StringUtils.hasText(metricsPrefix)) { do { traverseClassFieldsRecursively(metricsPropertyClass, metricsPrefix, metricsReplicationHandler); // traverse the parent class if not Object. @@ -234,7 +269,7 @@ private void traversePropertyClassFields(Class<?> metricsPropertyClass, Consumer * Iterate over the fields of the provided class. For non-inner class fields generate a metrics property candidate * and pass it to the metrics replication handler for processing. For the inner-class fields extend the * prefix with the name of the field and call traverseClassFieldsRecursively recursively. - * + * <p>
* Use the RelaxedNames.camelCaseToHyphenLower utility to convert the field names into property keys. * * @param metricsPropertyClass Class to be processed. diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java new file mode 100644 index 0000000000..a6989bca89 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java @@ -0,0 +1,133 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.config; + +import javax.annotation.PostConstruct; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springdoc.core.SpringDocConfigProperties; +import org.springdoc.core.SpringDocConfiguration; +import org.springdoc.core.SwaggerUiConfigProperties; +import org.springdoc.webmvc.ui.SwaggerConfig; + +import org.springframework.boot.autoconfigure.AutoConfigureAfter; +import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.boot.web.servlet.FilterRegistrationBean; +import org.springframework.cloud.dataflow.server.support.SpringDocJsonDecodeFilter; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.configuration.WebSecurityCustomizer; + +/** + * Makes SpringDoc publicly available without requiring authentication by initializing a {@link WebSecurityCustomizer} and + * configuring all SpringDoc paths to be ignored. Also applies a filter registration bean to unescape JSON content for the + * SpringDoc frontend. + * + * @author Tobias Soloschenko + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnClass({ SpringDocConfigProperties.class, SwaggerUiConfigProperties.class }) +@ConditionalOnBean({ SpringDocConfigProperties.class, SwaggerUiConfigProperties.class }) +@AutoConfigureAfter({ SpringDocConfiguration.class, SwaggerConfig.class }) +public class SpringDocAutoConfiguration { + + private static final Logger logger = LoggerFactory.getLogger(SpringDocAutoConfiguration.class); + + private final SpringDocConfigProperties springDocConfigProperties; + + private final SwaggerUiConfigProperties swaggerUiConfigProperties; + + /** + * Creates the SpringDocAutoConfiguration with the given properties.
+ * + * @param springDocConfigProperties the spring doc config properties + * @param swaggerUiConfigProperties the swagger ui config properties + */ + public SpringDocAutoConfiguration(SpringDocConfigProperties springDocConfigProperties, + SwaggerUiConfigProperties swaggerUiConfigProperties) { + this.springDocConfigProperties = springDocConfigProperties; + this.swaggerUiConfigProperties = swaggerUiConfigProperties; + } + + @PostConstruct + void init() { + logger.info("SpringDoc enabled"); + } + + /** + * Creates a web security customizer for Spring Security which makes the SpringDoc frontend publicly available. + * + * @return a web security customizer with security settings for SpringDoc + */ + @Bean + @ConditionalOnMissingBean + public WebSecurityCustomizer springDocWebSecurityCustomizer() { + return (webSecurity -> webSecurity.ignoring().antMatchers( + "/swagger-ui/**", + getApiDocsPathContext() + "/**", + swaggerUiConfigProperties.getPath(), + swaggerUiConfigProperties.getConfigUrl(), + swaggerUiConfigProperties.getValidatorUrl(), + swaggerUiConfigProperties.getOauth2RedirectUrl(), + springDocConfigProperties.getWebjars().getPrefix(), + springDocConfigProperties.getWebjars().getPrefix() + "/**")); + } + + /** + * Applies {@link SpringDocJsonDecodeFilter} to the filter chain which decodes the JSON of ApiDocs and SwaggerUi so that the SpringDoc frontend is able + * to read it. Spring Cloud Data Flow however requires the JSON to be escaped and wrapped into quotes, because the + * Angular UI frontend uses it that way. + * + * @return a filter registration bean which unescapes the content of the JSON endpoints of SpringDoc before it is returned. + */ + @Bean + @ConditionalOnMissingBean(name = "springDocJsonDecodeFilterRegistration") + public FilterRegistrationBean<SpringDocJsonDecodeFilter> springDocJsonDecodeFilterRegistration() { + String apiDocsPathContext = getApiDocsPathContext(); + String swaggerUiConfigContext = getSwaggerUiConfigContext(); + FilterRegistrationBean<SpringDocJsonDecodeFilter> registrationBean = new FilterRegistrationBean<>(); + registrationBean.setFilter(new SpringDocJsonDecodeFilter()); + registrationBean.addUrlPatterns(apiDocsPathContext, apiDocsPathContext + "/*", swaggerUiConfigContext, + swaggerUiConfigContext + "/*"); + return registrationBean; + } + + /** + * Gets the SwaggerUi config context. For example the default configuration for the SwaggerUi config is /v3/api-docs/swagger-config + * which results in a context of /v3/api-docs.
+ * + * @return the api docs path context + */ + private String getApiDocsPathContext() { + String apiDocsPath = springDocConfigProperties.getApiDocs().getPath(); + return apiDocsPath.substring(0, apiDocsPath.lastIndexOf("/")); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java index 140955b120..a8da638e0e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java @@ -42,7 +42,7 @@ public LocalTaskPlatformFactory(LocalPlatformProperties platformProperties, Sche @Override public TaskPlatform createTaskPlatform() { TaskPlatform taskPlatform = super.createTaskPlatform(); - if (taskPlatform.getLaunchers().size() == 0) { + if (taskPlatform.getLaunchers().isEmpty()) { taskPlatform.setLaunchers(Collections.singletonList(createDefaultLauncher())); } return taskPlatform; @@ -68,14 +68,13 @@ private Launcher doCreateLauncher(String account, LocalDeployerProperties deploy private String prettyPrintLocalDeployerProperties(LocalDeployerProperties localDeployerProperties) { StringBuilder builder = new StringBuilder(); if (localDeployerProperties.getJavaOpts() != null) { - builder.append("JavaOpts = [" + localDeployerProperties.getJavaOpts() + "], "); + builder.append("JavaOpts = [").append(localDeployerProperties.getJavaOpts()).append("], "); } - builder.append("ShutdownTimeout = [" + localDeployerProperties.getShutdownTimeout() + "], "); - builder.append("EnvVarsToInherit = [" - + StringUtils.arrayToCommaDelimitedString(localDeployerProperties.getEnvVarsToInherit()) + "], "); - builder.append("JavaCmd = [" + localDeployerProperties.getJavaCmd() + "], "); - builder.append("WorkingDirectoriesRoot = [" + localDeployerProperties.getWorkingDirectoriesRoot() + "], "); - builder.append("DeleteFilesOnExit = [" + localDeployerProperties.isDeleteFilesOnExit() + "]"); + builder.append("ShutdownTimeout = [").append(localDeployerProperties.getShutdownTimeout()).append("], "); + builder.append("EnvVarsToInherit = [").append(StringUtils.arrayToCommaDelimitedString(localDeployerProperties.getEnvVarsToInherit())).append("], "); + builder.append("JavaCmd = [").append(localDeployerProperties.getJavaCmd()).append("], "); + builder.append("WorkingDirectoriesRoot = [").append(localDeployerProperties.getWorkingDirectoriesRoot()).append("], "); + builder.append("DeleteFilesOnExit = [").append(localDeployerProperties.isDeleteFilesOnExit()).append("]"); return builder.toString(); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java index 2e0c9ea2f0..e976348e54 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java @@ -27,6 +27,8 @@ import 
org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; import org.springframework.cloud.dataflow.core.TaskPlatform; @@ -35,12 +37,14 @@ import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.SchedulerServiceProperties; +import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.dataflow.server.service.impl.ComposedTaskRunnerConfigurationProperties; import org.springframework.cloud.dataflow.server.service.impl.DefaultSchedulerService; import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.ResourceLoader; /** @@ -72,12 +76,28 @@ public SchedulerService schedulerService(CommonApplicationProperties commonAppli ApplicationConfigurationMetadataResolver metaDataResolver, SchedulerServiceProperties schedulerServiceProperties, AuditRecordService auditRecordService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionInfoService taskExecutionInfoService, + PropertyResolver propertyResolver, ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { return new DefaultSchedulerService(commonApplicationProperties, - taskPlatforms, taskDefinitionRepository, registry, resourceLoader, - taskConfigurationProperties, dataSourceProperties, - this.dataflowServerUri, metaDataResolver, schedulerServiceProperties, auditRecordService, - composedTaskRunnerConfigurationProperties); + taskPlatforms, + taskDefinitionRepository, + registry, + resourceLoader, + taskConfigurationProperties, + dataSourceProperties, + this.dataflowServerUri, + metaDataResolver, + schedulerServiceProperties, + auditRecordService, + aggregateExecutionSupport, + taskDefinitionReader, + taskExecutionInfoService, + propertyResolver, + composedTaskRunnerConfigurationProperties + ); } public static class SchedulerConfigurationPropertyChecker extends AllNestedConditions { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java index 317c09feb0..a0f20c5b76 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. 
+ * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,36 +13,46 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.springframework.cloud.dataflow.server.config.features; import java.util.List; import javax.sql.DataSource; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskConfiguration; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; import org.springframework.cloud.dataflow.core.TaskPlatform; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration; import org.springframework.cloud.dataflow.server.DockerValidatorProperties; -import org.springframework.cloud.dataflow.server.batch.JobService; -import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; +import org.springframework.cloud.dataflow.server.config.AggregateDataFlowTaskConfiguration; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.job.LauncherRepository; -import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDefinitionReader; +import 
org.springframework.cloud.dataflow.server.repository.DefaultTaskDeploymentReader; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import org.springframework.cloud.dataflow.server.service.DeployerConfigurationMetadataResolver; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; import org.springframework.cloud.dataflow.server.service.LauncherInitializationService; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; @@ -61,14 +71,13 @@ import org.springframework.cloud.dataflow.server.service.impl.TaskAppDeploymentRequestCreator; import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.cloud.deployer.spi.scheduler.Scheduler; -import org.springframework.cloud.task.repository.TaskExplorer; -import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Profile; +import org.springframework.core.env.PropertyResolver; import org.springframework.data.map.repository.config.EnableMapRepositories; import org.springframework.lang.Nullable; -import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.EnableTransactionManagement; /** @@ -80,22 +89,30 @@ * @author Gunnar Hillert * @author Christian Tzolov * @author David Turanski + * @author Corneil du Plessis */ -@Configuration +@Configuration(proxyBeanMethods = false) @ConditionalOnTasksEnabled -@EnableConfigurationProperties({ TaskConfigurationProperties.class, CommonApplicationProperties.class, - DockerValidatorProperties.class, LocalPlatformProperties.class, ComposedTaskRunnerConfigurationProperties.class +@EnableConfigurationProperties({ + TaskConfigurationProperties.class, + CommonApplicationProperties.class, + DockerValidatorProperties.class, + LocalPlatformProperties.class, + ComposedTaskRunnerConfigurationProperties.class }) @EnableMapRepositories(basePackages = "org.springframework.cloud.dataflow.server.job") @EnableTransactionManagement +@Import({ + TaskConfiguration.TaskDeleteServiceConfig.class, + SchemaServiceConfiguration.class, + AggregateTaskConfiguration.class, + AggregateDataFlowTaskConfiguration.class +}) public class TaskConfiguration { @Autowired DataSourceProperties dataSourceProperties; - @Autowired(required = false) - SchedulerService schedulerService; - @Value("${spring.cloud.dataflow.server.uri:}") private String dataflowServerUri; @@ -105,9 +122,22 @@ public class TaskConfiguration { @Autowired private ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + @Bean + @ConditionalOnMissingBean + public TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository taskDefinitionRepository) { + return new DefaultTaskDefinitionReader(taskDefinitionRepository); + } + + @Bean + @ConditionalOnMissingBean + public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) { + return new DefaultTaskDeploymentReader(repository); + } + @Bean public DeployerConfigurationMetadataResolver deployerConfigurationMetadataResolver( - TaskConfigurationProperties taskConfigurationProperties) { + 
TaskConfigurationProperties taskConfigurationProperties + ) { return new DeployerConfigurationMetadataResolver(taskConfigurationProperties.getDeployerProperties()); } @@ -115,22 +145,25 @@ public DeployerConfigurationMetadataResolver deployerConfigurationMetadataResolv public LauncherInitializationService launcherInitializationService( LauncherRepository launcherRepository, List<TaskPlatform> platforms, - DeployerConfigurationMetadataResolver resolver) { + DeployerConfigurationMetadataResolver resolver + ) { return new LauncherInitializationService(launcherRepository, platforms, resolver); } /** * The default profile is active when no other profiles are active. This is configured so * that several tests will pass without having to explicitly enable the local profile. - * @param localPlatformProperties the local platform properties - * @param localScheduler the local scheduler * + * @param localPlatformProperties the local platform properties + * @param localScheduler the local scheduler * @return the task platform */ - @Profile({ "local", "default" }) + @Profile({"local", "default"}) @Bean - public TaskPlatform localTaskPlatform(LocalPlatformProperties localPlatformProperties, - @Nullable Scheduler localScheduler) { + public TaskPlatform localTaskPlatform( + LocalPlatformProperties localPlatformProperties, + @Nullable Scheduler localScheduler + ) { TaskPlatform taskPlatform = new LocalTaskPlatformFactory(localPlatformProperties, localScheduler) .createTaskPlatform(); taskPlatform.setPrimary(true); @@ -138,111 +171,152 @@ public TaskPlatform localTaskPlatform(LocalPlatformProperties localPlatformPrope } @Bean - public TaskExecutionInfoService taskDefinitionRetriever(AppRegistryService registry, - TaskExplorer taskExplorer, TaskDefinitionRepository taskDefinitionRepository, + public TaskExecutionInfoService taskDefinitionRetriever( + AppRegistryService registry, + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, List<TaskPlatform> taskPlatforms, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + LauncherRepository launcherRepository, + List<TaskPlatform> taskPlatforms, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { return new DefaultTaskExecutionInfoService(dataSourceProperties, registry, taskExplorer, taskDefinitionRepository, taskConfigurationProperties, launcherRepository, taskPlatforms, composedTaskRunnerConfigurationProperties); } @Bean - public TaskDeleteService deleteTaskService(TaskExplorer taskExplorer, LauncherRepository launcherRepository, - TaskDefinitionRepository taskDefinitionRepository, TaskDeploymentRepository taskDeploymentRepository, - AuditRecordService auditRecordService, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowJobExecutionDao dataflowJobExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - TaskConfigurationProperties taskConfigurationProperties, - DataSource dataSource) { - return new DefaultTaskDeleteService(taskExplorer, launcherRepository, taskDefinitionRepository, - taskDeploymentRepository, - auditRecordService, - dataflowTaskExecutionDao, - dataflowJobExecutionDao, - dataflowTaskExecutionMetadataDao, - this.schedulerService, - taskConfigurationProperties, - dataSource); - } - - @Bean - public TaskSaveService saveTaskService(TaskDefinitionRepository taskDefinitionRepository, - AuditRecordService auditRecordService, AppRegistryService
registry) { + public TaskSaveService saveTaskService( + TaskDefinitionRepository taskDefinitionRepository, + AuditRecordService auditRecordService, AppRegistryService registry + ) { return new DefaultTaskSaveService(taskDefinitionRepository, auditRecordService, registry); } @Bean - public TaskExecutionCreationService taskExecutionRepositoryService(TaskRepository taskRepository) { - return new DefaultTaskExecutionRepositoryService(taskRepository); + public TaskExecutionCreationService taskExecutionRepositoryService( + TaskRepositoryContainer taskRepositoryContainer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader + ) { + return new DefaultTaskExecutionRepositoryService(taskRepositoryContainer, aggregateExecutionSupport, taskDefinitionReader); } @Bean public TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator( CommonApplicationProperties commonApplicationProperties, - ApplicationConfigurationMetadataResolver metadataResolver) { + ApplicationConfigurationMetadataResolver metadataResolver + ) { return new TaskAppDeploymentRequestCreator(commonApplicationProperties, metadataResolver, dataflowServerUri); } - @Bean - public TaskExecutionService taskService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService) { - DefaultTaskExecutionService defaultTaskExecutionService = new DefaultTaskExecutionService( - launcherRepository, auditRecordService, taskRepository, - taskExecutionInfoService, taskDeploymentRepository, taskExecutionRepositoryService, - taskAppDeploymentRequestCreator, taskExplorer, dataflowTaskExecutionDao, - dataflowTaskExecutionMetadataDao, oauth2TokenUtilsService, taskSaveService, - this.taskConfigurationProperties, this.composedTaskRunnerConfigurationProperties); - defaultTaskExecutionService.setAutoCreateTaskDefinitions(this.taskConfigurationProperties.isAutoCreateTaskDefinitions()); - return defaultTaskExecutionService; - } - - @Bean - public TaskJobService taskJobExecutionRepository(JobService service, TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionService taskExecutionService, LauncherRepository launcherRepository) { - return new DefaultTaskJobService(service, taskExplorer, taskDefinitionRepository, taskExecutionService, launcherRepository); - } - - @Bean - public SimpleJobServiceFactoryBean simpleJobServiceFactoryBean(DataSource dataSource, - JobRepositoryFactoryBean repositoryFactoryBean, JobExplorer jobExplorer, - PlatformTransactionManager dataSourceTransactionManager) throws Exception { - SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); - factoryBean.setDataSource(dataSource); - factoryBean.setJobRepository(repositoryFactoryBean.getObject()); - factoryBean.setJobLauncher(new SimpleJobLauncher()); - factoryBean.setDataSource(dataSource); - factoryBean.setJobExplorer(jobExplorer); - factoryBean.setTransactionManager(dataSourceTransactionManager); - return factoryBean; + @Configuration + public static class 
TaskExecutionServiceConfig { + @Bean + public TaskExecutionService taskService( + PropertyResolver propertyResolver, + TaskConfigurationProperties taskConfigurationProperties, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepositoryContainer taskRepositoryContainer, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + AggregateTaskExplorer taskExplorer, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader + ) { + DefaultTaskExecutionService defaultTaskExecutionService = new DefaultTaskExecutionService( + propertyResolver, + launcherRepository, + auditRecordService, + taskRepositoryContainer, + taskExecutionInfoService, + taskDeploymentRepository, + taskDefinitionRepository, + taskDefinitionReader, + taskExecutionRepositoryService, + taskAppDeploymentRequestCreator, + taskExplorer, + dataflowTaskExecutionDaoContainer, + dataflowTaskExecutionMetadataDaoContainer, + dataflowTaskExecutionQueryDao, + oauth2TokenUtilsService, + taskSaveService, + taskConfigurationProperties, + aggregateExecutionSupport, + composedTaskRunnerConfigurationProperties); + defaultTaskExecutionService.setAutoCreateTaskDefinitions(taskConfigurationProperties.isAutoCreateTaskDefinitions()); + return defaultTaskExecutionService; + } } - @Bean - public JobExplorerFactoryBean jobExplorerFactoryBean(DataSource dataSource) { - JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean(); - jobExplorerFactoryBean.setDataSource(dataSource); - return jobExplorerFactoryBean; + @Configuration(proxyBeanMethods = false) + public static class TaskJobServiceConfig { + @Bean + public TaskJobService taskJobExecutionRepository( + JobServiceContainer serviceContainer, + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionService taskExecutionService, + LauncherRepository launcherRepository, + AggregateExecutionSupport aggregateExecutionSupport, + AggregateJobQueryDao aggregateJobQueryDao, + TaskDefinitionReader taskDefinitionReader + ) { + return new DefaultTaskJobService( + serviceContainer, + taskExplorer, + taskDefinitionRepository, + taskExecutionService, + launcherRepository, + aggregateExecutionSupport, + aggregateJobQueryDao, + taskDefinitionReader + ); + } } - @Bean - public JobRepositoryFactoryBean jobRepositoryFactoryBean(DataSource dataSource, - PlatformTransactionManager platformTransactionManager) { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(dataSource); - repositoryFactoryBean.setTransactionManager(platformTransactionManager); - return repositoryFactoryBean; + @Configuration(proxyBeanMethods = false) + public static class TaskDeleteServiceConfig { + @Bean + public TaskDeleteService deleteTaskService( + AggregateTaskExplorer taskExplorer, + LauncherRepository 
launcherRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDeploymentRepository taskDeploymentRepository, + AuditRecordService auditRecordService, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowJobExecutionDaoContainer dataflowJobExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + TaskConfigurationProperties taskConfigurationProperties, + DataSource dataSource, + SchemaService schemaService, + @Autowired(required = false) SchedulerService schedulerService + ) { + return new DefaultTaskDeleteService( + taskExplorer, + launcherRepository, + taskDefinitionRepository, + taskDeploymentRepository, + auditRecordService, + dataflowTaskExecutionDaoContainer, + dataflowJobExecutionDaoContainer, + dataflowTaskExecutionMetadataDaoContainer, + schedulerService, + schemaService, + taskConfigurationProperties, + dataSource + ); + } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java index 8023310ffd..deb1a3dda0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,7 +15,6 @@ */ package org.springframework.cloud.dataflow.server.config.web; -import java.sql.SQLException; import java.util.Arrays; import java.util.Locale; import java.util.TimeZone; @@ -28,11 +27,8 @@ import io.micrometer.core.instrument.LongTaskTimer; import io.micrometer.core.instrument.Metrics; import io.micrometer.core.instrument.Tags; -import org.h2.tools.Server; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; -import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; import org.springframework.boot.autoconfigure.http.HttpMessageConverters; @@ -40,17 +36,17 @@ import org.springframework.boot.web.servlet.ServletContextInitializer; import org.springframework.cloud.dataflow.rest.support.jackson.ISO8601DateFormatWithMilliSeconds; import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; +import org.springframework.cloud.dataflow.schema.AppBootVersionConverter; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.event.ContextClosedEvent; +import org.springframework.format.FormatterRegistry; import org.springframework.hateoas.server.core.DefaultLinkRelationProvider; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.ResourceHttpMessageConverter; import org.springframework.http.converter.StringHttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import 
org.springframework.util.Assert; -import org.springframework.util.StringUtils; import org.springframework.web.servlet.config.annotation.PathMatchConfigurer; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; @@ -63,51 +59,24 @@ * @author Mark Pollack * @author Christian Tzolov * @author David Turanski + * @author Michael Wirth + * @author Chris Bono + * @author Corneil du Plessis */ -@Configuration +@Configuration(proxyBeanMethods = false) @ConditionalOnWebApplication public class WebConfiguration implements ServletContextInitializer, ApplicationListener { - private static final org.slf4j.Logger logger = LoggerFactory.getLogger(WebConfiguration.class); - private static final String REL_PROVIDER_BEAN_NAME = "defaultRelProvider"; - @Value("${spring.datasource.url:#{null}}") - private String dataSourceUrl; - - private Server server = null; private LongTaskTimer.Sample longTaskSample; - public Server initH2TCPServer() { - logger.info("Starting H2 Server with URL: " + dataSourceUrl); - try { - this.server = Server - .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", getH2Port(dataSourceUrl)) - .start(); - } - catch (SQLException e) { - throw new IllegalStateException(e); - } - return server; - } - - private String getH2Port(String url) { - String[] tokens = StringUtils.tokenizeToStringArray(url, ":"); - Assert.isTrue(tokens.length >= 5, "URL not properly formatted"); - return tokens[4].substring(0, tokens[4].indexOf("/")); - } - @Override public void onStartup(ServletContext servletContext) { LongTaskTimer longTaskTimer = LongTaskTimer .builder("spring.cloud.dataflow.server").description("Spring Cloud Data Flow duration timer") .tags(Tags.empty()).register(Metrics.globalRegistry); this.longTaskSample = longTaskTimer.start(); - - if (StringUtils.hasText(dataSourceUrl) && dataSourceUrl.startsWith("jdbc:h2:tcp://localhost:")) { - logger.info("Start Embedded H2"); - initH2TCPServer(); - } } @Bean @@ -127,6 +96,11 @@ public WebMvcConfigurer configurer() { public void configurePathMatch(PathMatchConfigurer configurer) { configurer.setUseSuffixPatternMatch(false); } + + @Override + public void addFormatters(FormatterRegistry registry) { + registry.addConverter(new AppBootVersionConverter()); + } }; } @@ -167,9 +141,6 @@ public void onApplicationEvent(ContextClosedEvent event) { this.longTaskSample.stop(); this.longTaskSample = null; } - if (this.server != null) { - this.server.stop(); - logger.info("Embedded H2 server stopped!"); - } } + } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java index cefdd6a464..bd25aa0fcd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java @@ -17,14 +17,17 @@ import java.util.ArrayList; import java.util.List; - +import java.util.Map; import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.annotation.Value; +import 
org.springframework.boot.actuate.info.BuildInfoContributor; +import org.springframework.boot.actuate.info.GitInfoContributor; +import org.springframework.boot.actuate.info.Info; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.common.security.support.SecurityStateBean; import org.springframework.cloud.dataflow.core.Launcher; @@ -45,7 +48,6 @@ import org.springframework.cloud.deployer.spi.core.RuntimeEnvironmentInfo; import org.springframework.cloud.deployer.spi.task.TaskLauncher; import org.springframework.hateoas.server.ExposesResourceFor; -import org.springframework.hateoas.server.mvc.WebMvcLinkBuilder; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -54,6 +56,7 @@ import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -63,6 +66,8 @@ import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestTemplate; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; + /** * REST controller that provides meta information regarding the dataflow server and its * deployers. @@ -70,6 +75,7 @@ * @author Gunnar Hillert * @author Glenn Renfro * @author Ilayaperumal Gopinathan + * @author Felipe Gutierrez */ @RestController @RequestMapping("/about") @@ -100,14 +106,32 @@ public class AboutController { private DataflowMetricsProperties dataflowMetricsProperties; + private ObjectProvider<GitInfoContributor> gitInfoContributor; + + private ObjectProvider<BuildInfoContributor> buildInfoContributor; + + @Deprecated + public AboutController(StreamDeployer streamDeployer, LauncherRepository launcherRepository, FeaturesProperties featuresProperties, + VersionInfoProperties versionInfoProperties, SecurityStateBean securityStateBean, DataflowMetricsProperties monitoringProperties) { + this.streamDeployer = streamDeployer; + this.launcherRepository = launcherRepository; + this.featuresProperties = featuresProperties; + this.versionInfoProperties = versionInfoProperties; + this.securityStateBean = securityStateBean; + this.dataflowMetricsProperties = monitoringProperties; + } + public AboutController(StreamDeployer streamDeployer, LauncherRepository launcherRepository, FeaturesProperties featuresProperties, - VersionInfoProperties versionInfoProperties, SecurityStateBean securityStateBean, DataflowMetricsProperties monitoringProperties) { + VersionInfoProperties versionInfoProperties, SecurityStateBean securityStateBean, DataflowMetricsProperties monitoringProperties, + ObjectProvider<GitInfoContributor> gitInfoContributor, ObjectProvider<BuildInfoContributor> buildInfoContributor) { this.streamDeployer = streamDeployer; this.launcherRepository = launcherRepository; this.featuresProperties = featuresProperties; this.versionInfoProperties = versionInfoProperties; this.securityStateBean = securityStateBean; this.dataflowMetricsProperties = monitoringProperties; + this.gitInfoContributor = gitInfoContributor; + this.buildInfoContributor = buildInfoContributor; } /** @@ -221,7 +245,9 @@ else if (dashboard.getType() == MonitoringDashboardType.WAVEFRONT) { aboutResource.setMonitoringDashboardInfo(monitoringDashboardInfo); } -
aboutResource.add(WebMvcLinkBuilder.linkTo(AboutController.class).withSelfRel()); + aboutResource.add(linkTo(AboutController.class).withSelfRel()); + + addGitAndBuildInfoIfAvailable(aboutResource); return aboutResource; } @@ -298,10 +324,9 @@ private String constructUrl(String url, String version) { } private String repoSelector(String version) { - final String REPO_SNAPSHOT_ROOT = "https://repo.spring.io/libs-snapshot"; - final String REPO_MILESTONE_ROOT = "https://repo.spring.io/libs-milestone"; - final String REPO_RELEASE_ROOT = "https://repo.spring.io/libs-release"; - final String MAVEN_ROOT = "https://repo1.maven.org/maven2"; + final String REPO_SNAPSHOT_ROOT = "https://repo.spring.io/snapshot"; + final String REPO_MILESTONE_ROOT = "https://repo.spring.io/milestone"; + final String MAVEN_ROOT = "https://repo.maven.apache.org/maven2"; String result = MAVEN_ROOT; if (version.endsWith("-SNAPSHOT")) { @@ -313,9 +338,17 @@ else if (version.contains(".M")) { else if (version.contains(".RC")) { result = REPO_MILESTONE_ROOT; } - else if (version.contains(".RELEASE")) { - result = REPO_RELEASE_ROOT; - } return result; } + + private void addGitAndBuildInfoIfAvailable(AboutResource aboutResource) { + Info.Builder builder = new Info.Builder(); + gitInfoContributor.ifAvailable(c -> c.contribute(builder)); + buildInfoContributor.ifAvailable(c -> c.contribute(builder)); + Map details = builder.build().getDetails(); + if (!ObjectUtils.isEmpty(details)) { + aboutResource.setGitAndBuildInfo(details); + } + } + } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java new file mode 100644 index 0000000000..2cb0a2d214 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java @@ -0,0 +1,8 @@ +package org.springframework.cloud.dataflow.server.controller; + +public class ApiNotSupportedException extends RuntimeException { + + public ApiNotSupportedException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java index 02bc84fdba..2be29e5342 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java @@ -47,6 +47,7 @@ import org.springframework.cloud.dataflow.rest.SkipperStream; import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.DetailedAppRegistrationResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.cloud.dataflow.server.controller.assembler.AppRegistrationAssemblerProvider; import org.springframework.cloud.dataflow.server.repository.InvalidApplicationNameException; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; @@ -82,6 +83,7 @@ * @author Patrick Peralta * @author Thomas Risberg * @author Chris Schaefer + * @author Corneil du Plessis */ @RestController 
@RequestMapping("/apps") @@ -131,6 +133,7 @@ public AppRegistryController(Optional streamDefiniti * @param type the application type: source, sink, processor, task * @param version optional application version * @param search optional findByTaskNameContains parameter + * @param defaultVersion Indicator to use default version. * @return the list of registered applications */ @GetMapping @@ -221,17 +224,21 @@ else if (entry.getKey().equals("outbound")) { * @param type module type * @param name module name * @param version module version + * @param bootVersion module boot version or {@code null} to use the default * @param uri URI for the module artifact (e.g. {@literal maven://group:artifact:version}) * @param metadataUri URI for the metadata artifact * @param force if {@code true}, overwrites a pre-existing registration */ @RequestMapping(value = "/{type}/{name}/{version:.+}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) - public void register(@PathVariable("type") ApplicationType type, @PathVariable("name") String name, + public void register( + @PathVariable("type") ApplicationType type, + @PathVariable("name") String name, @PathVariable("version") String version, - @RequestParam("uri") String uri, @RequestParam(name = "metadata-uri", required = false) String metadataUri, + @RequestParam(name = "bootVersion", required = false) String bootVersion, + @RequestParam("uri") String uri, + @RequestParam(name = "metadata-uri", required = false) String metadataUri, @RequestParam(value = "force", defaultValue = "false") boolean force) { - validateApplicationName(name); appRegistryService.validate(appRegistryService.getDefaultApp(name, type), uri, version); AppRegistration previous = appRegistryService.find(name, type, version); @@ -239,9 +246,15 @@ public void register(@PathVariable("type") ApplicationType type, @PathVariable(" throw new AppAlreadyRegisteredException(previous); } try { - AppRegistration registration = this.appRegistryService.save(name, type, version, new URI(uri), - metadataUri != null ? new URI(metadataUri) : null); - prefetchMetadata(Arrays.asList(registration)); + AppRegistration registration = this.appRegistryService.save( + name, + type, + version, + new URI(uri), + metadataUri != null ? new URI(metadataUri) : null, + bootVersion != null ? 
AppBootSchemaVersion.fromBootVersion(bootVersion) : AppBootSchemaVersion.defaultVersion() + ); + prefetchMetadata(Collections.singletonList(registration)); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); @@ -251,11 +264,23 @@ public void register(@PathVariable("type") ApplicationType type, @PathVariable(" @Deprecated @RequestMapping(value = "/{type}/{name}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) - public void register(@PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @RequestParam("uri") String uri, @RequestParam(name = "metadata-uri", required = false) String metadataUri, + public void register( + @PathVariable("type") ApplicationType type, + @PathVariable("name") String name, + @RequestParam(name = "bootVersion", required = false) String bootVersion, + @RequestParam("uri") String uri, + @RequestParam(name = "metadata-uri", required = false) String metadataUri, @RequestParam(value = "force", defaultValue = "false") boolean force) { String version = this.appRegistryService.getResourceVersion(uri); - this.register(type, name, version, uri, metadataUri, force); + this.register( + type, + name, + version, + bootVersion, + uri, + metadataUri, + force + ); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java index 5f631249be..9d8a0a9c34 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
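For reference, a minimal client-side sketch of the new optional bootVersion registration parameter introduced above (not part of this diff): the endpoint shape and parameter names come from AppRegistryController, while the server address, the artifact coordinates, and the "2" value are illustrative assumptions.

    import org.springframework.util.LinkedMultiValueMap;
    import org.springframework.util.MultiValueMap;
    import org.springframework.web.client.RestTemplate;

    public class RegisterAppWithBootVersion {
        public static void main(String[] args) {
            // Hypothetical app coordinates; any resolvable app artifact would do.
            MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
            form.add("uri", "maven://org.example:time-source:1.0.0");
            // Optional: when omitted, the server falls back to AppBootSchemaVersion.defaultVersion().
            form.add("bootVersion", "2");
            // POST /apps/{type}/{name}/{version}, as declared by the register(...) mapping above.
            new RestTemplate().postForEntity("http://localhost:9393/apps/source/time/1.0.0", form, Void.class);
        }
    }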
@@ -16,7 +16,6 @@ package org.springframework.cloud.dataflow.server.controller; -import java.util.List; import java.util.TimeZone; import org.springframework.batch.core.BatchStatus; @@ -29,17 +28,19 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -47,12 +48,16 @@ import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for operations on {@link org.springframework.batch.core.JobExecution}. This * includes obtaining Job execution information from the job explorer. * * @author Glenn Renfro * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions") @@ -68,7 +73,7 @@ public class JobExecutionController { * a the {@link JobService} * * @param taskJobService the service this controller will use for retrieving job execution - * information. Must not be null. + * information. Must not be null. */ public JobExecutionController(TaskJobService taskJobService) { Assert.notNull(taskJobService, "taskJobService must not be null"); @@ -78,12 +83,14 @@ public JobExecutionController(TaskJobService taskJobService) { /** * Retrieve all task job executions with the task name specified * - * @param jobName name of the job. SQL server specific wildcards are enabled (eg.: myJob%, - * m_Job, ...) - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param jobName name of the job. SQL server specific wildcards are enabled (eg.: myJob%, + * m_Job, ...) + * @param status Optional status criteria. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. - * @throws NoSuchJobException if the job with the given name does not exist. + * @throws NoSuchJobException if the job with the given name does not exist. + * @throws NoSuchJobExecutionException if the job execution doesn't exist. 
*/ @RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json") @ResponseStatus(HttpStatus.OK) @@ -91,19 +98,9 @@ public PagedModel retrieveJobsByParameters( @RequestParam(value = "name", required = false) String jobName, @RequestParam(value = "status", required = false) BatchStatus status, Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException, NoSuchJobExecutionException { - List jobExecutions; - Page page; - - if (jobName == null && status == null) { - jobExecutions = taskJobService.listJobExecutions(pageable); - page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions()); - } else { - jobExecutions = taskJobService.listJobExecutionsForJob(pageable, jobName, status); - page = new PageImpl<>(jobExecutions, pageable, - taskJobService.countJobExecutionsForJob(jobName, status)); - } - - return assembler.toModel(page, jobAssembler); + Page jobExecutions = jobName == null && status == null ? taskJobService.listJobExecutions(pageable) + : taskJobService.listJobExecutionsForJob(pageable, jobName, status); + return assembler.toModel(jobExecutions, jobAssembler); } /** @@ -112,14 +109,18 @@ public PagedModel retrieveJobsByParameters( * @param id the id of the requested {@link JobExecution} * @return the {@link JobExecution} * @throws NoSuchJobExecutionException if the specified job execution for the id does not - * exist. + * exist. */ @RequestMapping(value = "/{id}", method = RequestMethod.GET, produces = "application/json") @ResponseStatus(HttpStatus.OK) - public JobExecutionResource view(@PathVariable("id") long id) throws NoSuchJobExecutionException { - TaskJobExecution jobExecution = taskJobService.getJobExecution(id); + public JobExecutionResource view(@PathVariable("id") long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget) throws NoSuchJobExecutionException { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskJobExecution jobExecution = taskJobService.getJobExecution(id, schemaTarget); if (jobExecution == null) { - throw new NoSuchJobExecutionException(String.format("No Job Execution with id of %d exits", id)); + throw new NoSuchJobExecutionException(String.format("No Job Execution with id of %d exists for schema target %s", id, schemaTarget)); } return jobAssembler.toModel(jobExecution); } @@ -130,14 +131,17 @@ public JobExecutionResource view(@PathVariable("id") long id) throws NoSuchJobEx * * @param jobExecutionId the executionId of the job execution to stop. * @throws JobExecutionNotRunningException if a stop is requested on a job that is not - * running. - * @throws NoSuchJobExecutionException if the job execution id specified does not exist. + * running. + * @throws NoSuchJobExecutionException if the job execution id specified does not exist.
*/ - @RequestMapping(value = { "/{executionId}" }, method = RequestMethod.PUT, params = "stop=true") - @ResponseStatus(HttpStatus.OK) - public void stopJobExecution(@PathVariable("executionId") long jobExecutionId) - throws NoSuchJobExecutionException, JobExecutionNotRunningException { - taskJobService.stopJobExecution(jobExecutionId); + @RequestMapping(value = {"/{executionId}"}, method = RequestMethod.PUT, params = "stop=true") + + public ResponseEntity stopJobExecution( + @PathVariable("executionId") long jobExecutionId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchJobExecutionException, JobExecutionNotRunningException { + taskJobService.stopJobExecution(jobExecutionId, schemaTarget); + return ResponseEntity.ok().build(); } /** @@ -146,17 +150,20 @@ public void stopJobExecution(@PathVariable("executionId") long jobExecutionId) * * @param jobExecutionId the executionId of the job execution to restart * @throws NoSuchJobExecutionException if the job execution for the jobExecutionId - * specified does not exist. + * specified does not exist. */ - @RequestMapping(value = { "/{executionId}" }, method = RequestMethod.PUT, params = "restart=true") + @RequestMapping(value = {"/{executionId}"}, method = RequestMethod.PUT, params = "restart=true") @ResponseStatus(HttpStatus.OK) - public void restartJobExecution(@PathVariable("executionId") long jobExecutionId) - throws NoSuchJobExecutionException { - taskJobService.restartJobExecution(jobExecutionId); + public ResponseEntity restartJobExecution( + @PathVariable("executionId") long jobExecutionId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchJobExecutionException { + taskJobService.restartJobExecution(jobExecutionId, schemaTarget); + return ResponseEntity.ok().build(); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that * converts {@link JobExecution}s to {@link JobExecutionResource}s. 
*/ private static class Assembler extends RepresentationModelAssemblerSupport { @@ -178,12 +185,24 @@ public void setTimeZone(TimeZone timeZone) { @Override public JobExecutionResource toModel(TaskJobExecution taskJobExecution) { - return createModelWithId(taskJobExecution.getJobExecution().getId(), taskJobExecution); + return instantiateModel(taskJobExecution); } @Override public JobExecutionResource instantiateModel(TaskJobExecution taskJobExecution) { - return new JobExecutionResource(taskJobExecution, timeZone); + JobExecutionResource resource = new JobExecutionResource(taskJobExecution, timeZone); + try { + resource.add(linkTo(methodOn(JobExecutionController.class).view(taskJobExecution.getTaskId(), taskJobExecution.getSchemaTarget())).withSelfRel()); + if (taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).stopJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("stop")); + } + if (!taskJobExecution.getJobExecution().getStatus().equals(BatchStatus.COMPLETED)) { + resource.add(linkTo(methodOn(JobExecutionController.class).restartJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("restart")); + } + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + throw new RuntimeException(e); + } + return resource; } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java index 1609193fcc..d61fc8fb31 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
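The added controllers all repeat the same null-to-default handling for the new schemaTarget request parameter. A small helper like the following (hypothetical, not part of this diff; the two calls it wraps are taken verbatim from the added code) captures the idiom:

    import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
    import org.springframework.util.StringUtils;

    final class SchemaTargetDefaults {

        private SchemaTargetDefaults() {
        }

        // Mirrors the added controller code: fall back to the default schema
        // target whenever a request omits the schemaTarget parameter.
        static String orDefault(String schemaTarget) {
            return StringUtils.hasText(schemaTarget)
                    ? schemaTarget
                    : SchemaVersionTarget.defaultTarget().getName();
        }
    }

Each controller below inlines this check before resolving the per-schema JobService or querying the TaskJobService.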
@@ -17,10 +17,10 @@ package org.springframework.cloud.dataflow.server.controller; import java.util.Date; -import java.util.List; import java.util.TimeZone; import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.launch.JobExecutionNotRunningException; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; @@ -28,10 +28,10 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.resource.JobExecutionThinResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.format.annotation.DateTimeFormat; @@ -40,17 +40,22 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for retrieving {@link JobExecution}s where the step executions are * not included in the results that are returned. * * @author Glenn Renfro + * @author Corneil du Plessis * * @since 2.0 */ @@ -68,7 +73,7 @@ public class JobExecutionThinController { * from a the {@link JobService} * * @param taskJobService the service this controller will use for retrieving job - * execution information. Must not be null. + * execution information. Must not be null. */ @Autowired public JobExecutionThinController(TaskJobService taskJobService) { @@ -80,104 +85,115 @@ public JobExecutionThinController(TaskJobService taskJobService) { * Return a page-able list of {@link JobExecutionThinResource} defined jobs that * do not contain step execution detail. * - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return a list of Task/Job executions(job executions do not contain step executions. * @throws NoSuchJobExecutionException in the event that a job execution id specified - * is not present when looking up stepExecutions for the result. + * is not present when looking up stepExecutions for the result. 
*/ @RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel listJobsOnly(Pageable pageable, - PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { - List jobExecutions = taskJobService.listJobExecutionsWithStepCount(pageable); - Page page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions()); - return assembler.toModel(page, jobAssembler); + PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { + Page jobExecutions = taskJobService.listJobExecutionsWithStepCount(pageable); + return assembler.toModel(jobExecutions, jobAssembler); } + /** * Retrieve all task job executions with the task name specified * - * @param jobName name of the job - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param jobName name of the job + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ @RequestMapping(value = "", method = RequestMethod.GET, params = "name", produces = "application/json") @ResponseStatus(HttpStatus.OK) - public PagedModel retrieveJobsByName(@RequestParam("name") String jobName, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, jobName); - Page page = new PageImpl<>(jobExecutions, pageable, - taskJobService.countJobExecutionsForJob(jobName, null)); - return assembler.toModel(page, jobAssembler); + public PagedModel retrieveJobsByName( + @RequestParam("name") String jobName, + Pageable pageable, + PagedResourcesAssembler assembler) throws NoSuchJobException { + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, jobName); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the date range specified * - * @param fromDate the date which start date must be greater than. - * @param toDate the date which start date must be less than. - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param fromDate the date which start date must be greater than. + * @param toDate the date which start date must be less than. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. 
*/ - @RequestMapping(value = "", method = RequestMethod.GET, params = { "fromDate", - "toDate" }, produces = "application/json") + @RequestMapping(value = "", method = RequestMethod.GET, params = {"fromDate", + "toDate"}, produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByDateRange( @RequestParam("fromDate") @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date fromDate, @RequestParam("toDate") @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date toDate, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, fromDate, - toDate); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + Pageable pageable, + PagedResourcesAssembler assembler + ) throws NoSuchJobException { + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, fromDate, toDate); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the job instance id specified * * @param jobInstanceId the job instance id associated with the execution. - * @param pageable page-able collection of {@code TaskJobExecution}s. - * @param assembler for the {@link TaskJobExecution}s + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ @RequestMapping(value = "", method = RequestMethod.GET, params = "jobInstanceId", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByJobInstanceId( - @RequestParam("jobInstanceId") int jobInstanceId, Pageable pageable, + @RequestParam("jobInstanceId") int jobInstanceId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService - .listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(pageable, jobInstanceId); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + Page jobExecutions = taskJobService + .listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(pageable, jobInstanceId, schemaTarget); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the task execution id specified * * @param taskExecutionId the task execution id associated with the execution. - * @param pageable page-able collection of {@code TaskJobExecution}s. - * @param assembler for the {@link TaskJobExecution}s + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. 
*/ @RequestMapping(value = "", method = RequestMethod.GET, params = "taskExecutionId", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByTaskExecutionId( - @RequestParam("taskExecutionId") int taskExecutionId, Pageable pageable, + @RequestParam("taskExecutionId") int taskExecutionId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService - .listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(pageable, taskExecutionId); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId( + pageable, + taskExecutionId, + schemaTarget + ); + return assembler.toModel(jobExecutions, jobAssembler); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link JobExecution}s to {@link JobExecutionThinResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { @@ -199,12 +215,24 @@ public void setTimeZone(TimeZone timeZone) { @Override public JobExecutionThinResource toModel(TaskJobExecution taskJobExecution) { - return createModelWithId(taskJobExecution.getJobExecution().getId(), taskJobExecution); + return instantiateModel(taskJobExecution); } @Override public JobExecutionThinResource instantiateModel(TaskJobExecution taskJobExecution) { - return new JobExecutionThinResource(taskJobExecution, timeZone); + JobExecutionThinResource resource = new JobExecutionThinResource(taskJobExecution, timeZone); + try { + resource.add(linkTo(methodOn(JobExecutionController.class).view(taskJobExecution.getTaskId(), taskJobExecution.getSchemaTarget())).withSelfRel()); + if (taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).stopJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("stop")); + } + if (taskJobExecution.getJobExecution().getEndTime() != null && !taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).restartJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("restart")); + } + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + throw new RuntimeException(e); + } + return resource; } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java index c58ccd6f33..0cd9a16ac5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java @@ -31,9 +31,9 @@ import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; 
import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; @@ -41,6 +41,7 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -68,7 +69,7 @@ public class JobInstanceController { * Creates a {@code JobInstanceController} that retrieves Job Instance information. * * @param taskJobService the {@link TaskJobService} used for retrieving batch instance - * data. + * data. */ @Autowired public JobInstanceController(TaskJobService taskJobService) { @@ -79,20 +80,20 @@ public JobInstanceController(TaskJobService taskJobService) { /** * Return a page-able list of {@link JobInstanceResource} defined jobs. * - * @param jobName the name of the job - * @param pageable page-able collection of {@link JobInstance}s. + * @param jobName the name of the job + * @param pageable page-able collection of {@link JobInstance}s. * @param assembler for the {@link JobInstance}s * @return a list of Job Instance * @throws NoSuchJobException if the job for jobName specified does not exist. */ @RequestMapping(value = "", method = RequestMethod.GET, params = "name") @ResponseStatus(HttpStatus.OK) - public PagedModel list(@RequestParam("name") String jobName, Pageable pageable, + public PagedModel list( + @RequestParam("name") String jobName, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName); - Page page = new PageImpl<>(jobInstances, pageable, - taskJobService.countJobInstances(jobName)); - return assembler.toModel(page, jobAssembler); + Page jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName); + return assembler.toModel(jobInstances, jobAssembler); } /** @@ -101,12 +102,21 @@ public PagedModel list(@RequestParam("name") String jobName * @param id the id of the requested {@link JobInstance} * @return the {@link JobInstance} * @throws NoSuchJobInstanceException if job instance for the id does not exist. - * @throws NoSuchJobException if the job for the job instance does not exist. + * @throws NoSuchJobException if the job for the job instance does not exist. 
*/ @RequestMapping(value = "/{id}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public JobInstanceResource view(@PathVariable("id") long id) throws NoSuchJobInstanceException, NoSuchJobException { - JobInstanceExecutions jobInstance = taskJobService.getJobInstance(id); + public JobInstanceResource view( + @PathVariable("id") long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchJobInstanceException, NoSuchJobException { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + JobInstanceExecutions jobInstance = taskJobService.getJobInstance(id, schemaTarget); + if (jobInstance == null) { + throw new NoSuchJobInstanceException(String.format("No job instance for id '%d' and schema target '%s'", id, schemaTarget)); + } return jobAssembler.toModel(jobInstance); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java index 73a700c241..5a4c121e4f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,9 +24,11 @@ import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; import org.springframework.cloud.dataflow.server.job.support.StepExecutionResourceBuilder; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; @@ -36,35 +38,34 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; /** * @author Glenn Renfro + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions/{jobExecutionId}/steps") @ExposesResourceFor(StepExecutionResource.class) public class JobStepExecutionController { - private final JobService jobService; - - private final Assembler stepAssembler = new Assembler(); - + private final JobServiceContainer jobServiceContainer; /** * Creates a {@code 
JobStepExecutionsController} that retrieves Job Step Execution - * information from a the {@link JobService} + * information from the {@link JobServiceContainer} * - * @param jobService the service this controller will use for retrieving job step - * execution information. + * @param jobServiceContainer the {@link JobServiceContainer} used to select the {@link JobService} for a schema target */ @Autowired - public JobStepExecutionController(JobService jobService) { - Assert.notNull(jobService, "repository must not be null"); - this.jobService = jobService; + public JobStepExecutionController(JobServiceContainer jobServiceContainer) { + Assert.notNull(jobServiceContainer, "jobServiceContainer required"); + this.jobServiceContainer = jobServiceContainer; } /** @@ -79,11 +80,19 @@ public JobStepExecutionController(JobService jobService) { */ @RequestMapping(value = { "" }, method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel stepExecutions(@PathVariable("jobExecutionId") long id, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { - List result; - result = new ArrayList<>(jobService.getStepExecutions(id)); + public PagedModel stepExecutions( + @PathVariable("jobExecutionId") long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget, + Pageable pageable, + PagedResourcesAssembler assembler + ) throws NoSuchJobExecutionException { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + JobService jobService = jobServiceContainer.get(schemaTarget); + List result = new ArrayList<>(jobService.getStepExecutions(id)); Page page = new PageImpl<>(result, pageable, result.size()); + final Assembler stepAssembler = new Assembler(schemaTarget); return assembler.toModel(page, stepAssembler); } @@ -99,30 +108,39 @@ public PagedModel stepExecutions(@PathVariable("jobExecut */ @RequestMapping(value = { "/{stepExecutionId}" }, method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public StepExecutionResource getStepExecution(@PathVariable("jobExecutionId") Long id, - @PathVariable("stepExecutionId") Long stepId) + public StepExecutionResource getStepExecution( + @PathVariable("jobExecutionId") Long id, + @PathVariable("stepExecutionId") Long stepId, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - return stepAssembler.toModel(jobService.getStepExecution(id, stepId)); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + JobService jobService = jobServiceContainer.get(schemaTarget); + StepExecution stepExecution = jobService.getStepExecution(id, stepId); + final Assembler stepAssembler = new Assembler(schemaTarget); + return stepAssembler.toModel(stepExecution); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link StepExecution}s to {@link StepExecutionResource}s.
*/ private static class Assembler extends RepresentationModelAssemblerSupport { - - public Assembler() { + private final String schemaTarget; + public Assembler(String schemaTarget) { super(JobStepExecutionController.class, StepExecutionResource.class); + this.schemaTarget = schemaTarget; } @Override public StepExecutionResource toModel(StepExecution stepExecution) { - return createModelWithId(stepExecution.getId(), stepExecution, stepExecution.getJobExecution().getId()); + return StepExecutionResourceBuilder.toModel(stepExecution, schemaTarget); } @Override public StepExecutionResource instantiateModel(StepExecution stepExecution) { - return StepExecutionResourceBuilder.toModel(stepExecution); + return StepExecutionResourceBuilder.toModel(stepExecution, schemaTarget); } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java index 40e18ca258..388dec86d4 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,61 +22,77 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; import org.springframework.cloud.dataflow.rest.resource.StepExecutionProgressInfoResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; import org.springframework.cloud.dataflow.server.job.support.StepExecutionProgressInfo; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; -import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * @author Glenn Renfro + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions/{jobExecutionId}/steps") @ExposesResourceFor(StepExecutionProgressInfoResource.class) public class JobStepExecutionProgressController { - private final JobService jobService; + private final TaskJobService taskJobService; - private final 
Assembler stepAssembler = new Assembler(); + private final JobServiceContainer jobServiceContainer; /** * Creates a {@code JobStepProgressInfoExecutionsController} that retrieves Job Step - * Progress Execution information from a the {@link JobService} + * Progress Execution information from the {@link JobServiceContainer} * - * @param jobService the service this controller will use for retrieving job step - * progress execution information. + * @param jobServiceContainer A container of JobServices that this controller will use for retrieving job step + * progress execution information. + * @param taskJobService the service used to query across both schema targets. */ @Autowired - public JobStepExecutionProgressController(JobService jobService) { - Assert.notNull(jobService, "repository must not be null"); - this.jobService = jobService; + public JobStepExecutionProgressController(JobServiceContainer jobServiceContainer, TaskJobService taskJobService) { + this.taskJobService = taskJobService; + this.jobServiceContainer = jobServiceContainer; } /** * Get the step execution progress for the given jobExecutions step. * - * @param jobExecutionId Id of the {@link JobExecution}, must not be null + * @param jobExecutionId Id of the {@link JobExecution}, must not be null * @param stepExecutionId Id of the {@link StepExecution}, must not be null * @return {@link StepExecutionProgressInfoResource} that has the progress info on the * given {@link StepExecution}. - * @throws NoSuchJobExecutionException Thrown if the respective {@link JobExecution} - * does not exist + * @throws NoSuchJobExecutionException Thrown if the respective {@link JobExecution} + * does not exist * @throws NoSuchStepExecutionException Thrown if the respective {@link StepExecution} - * does not exist + * does not exist */ @RequestMapping(value = "/{stepExecutionId}/progress", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutionId, - @PathVariable long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { + public StepExecutionProgressInfoResource progress( + @PathVariable long jobExecutionId, + @PathVariable long stepExecutionId, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchStepExecutionException, NoSuchJobExecutionException { try { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + JobService jobService = jobServiceContainer.get(schemaTarget); StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId); String stepName = stepExecution.getStepName(); if (stepName.contains(":partition")) { @@ -84,13 +100,12 @@ public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutio stepName = stepName.replaceAll("(:partition).*", "$1*"); } String jobName = stepExecution.getJobExecution().getJobInstance().getJobName(); - StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName); + StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName, schemaTarget); + final Assembler stepAssembler = new Assembler(schemaTarget); return stepAssembler.toModel(new StepExecutionProgressInfo(stepExecution, stepExecutionHistory)); - } - catch (NoSuchStepExecutionException e) { + } catch (NoSuchStepExecutionException e) { throw new NoSuchStepExecutionException(String.valueOf(stepExecutionId)); - } - catch (NoSuchJobExecutionException
e) { throw new NoSuchJobExecutionException(String.valueOf(jobExecutionId)); } } @@ -98,11 +113,12 @@ public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutio /** * Compute step execution history for the given jobs step. * - * @param jobName the name of the job + * @param jobName the name of the job * @param stepName the name of the step * @return the step execution history for the given step */ - private StepExecutionHistory computeHistory(String jobName, String stepName) { + private StepExecutionHistory computeHistory(String jobName, String stepName, String schemaTarget) { + JobService jobService = jobServiceContainer.get(schemaTarget); int total = jobService.countStepExecutionsForStep(jobName, stepName); StepExecutionHistory stepExecutionHistory = new StepExecutionHistory(stepName); for (int i = 0; i < total; i += 1000) { @@ -114,14 +130,16 @@ private StepExecutionHistory computeHistory(String jobName, String stepName) { } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link StepExecutionProgressInfo}s to a {@link StepExecutionProgressInfoResource}. */ private static class Assembler extends RepresentationModelAssemblerSupport { + private final String schemaTarget; - public Assembler() { + public Assembler(String schemaTarget) { super(JobStepExecutionProgressController.class, StepExecutionProgressInfoResource.class); + this.schemaTarget = schemaTarget; } @Override @@ -132,8 +150,23 @@ public StepExecutionProgressInfoResource toModel(StepExecutionProgressInfo entit @Override protected StepExecutionProgressInfoResource instantiateModel(StepExecutionProgressInfo entity) { - return new StepExecutionProgressInfoResource(entity.getStepExecution(), entity.getStepExecutionHistory(), + StepExecutionProgressInfoResource resource = new StepExecutionProgressInfoResource(entity.getStepExecution(), entity.getStepExecutionHistory(), entity.getEstimatedPercentComplete(), entity.isFinished(), entity.getDuration()); + addLink(resource); + return resource; + } + + private void addLink(StepExecutionProgressInfoResource resource) { + try { + resource.add( + linkTo( + methodOn(JobStepExecutionProgressController.class) + .progress(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId(), schemaTarget) + ).withRel("progress") + ); + } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { + throw new RuntimeException(e); + } } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java new file mode 100644 index 0000000000..aab2b520e3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.controller; + +/** + * Exception thrown when a requested {@link org.springframework.cloud.dataflow.schema.SchemaVersionTarget} + * does not exist. + * @author Corneil du Plessis + */ +public class NoSuchSchemaTargetException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public NoSuchSchemaTargetException(String versionTargetName) { + super(String.format("SchemaVersionTarget: %s not found", versionTargetName)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java index eaa91d68dc..0ebe4f09d2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java @@ -95,14 +95,20 @@ public VndErrors onException(Exception e) { * * @param e one of the exceptions, {@link AppAlreadyRegisteredException}, * {@link DuplicateStreamDefinitionException}, {@link DuplicateTaskException}, - * {@link StreamAlreadyDeployedException}, {@link StreamAlreadyDeployingException}, or - * {@link StreamAlreadyDeployingException} + * {@link StreamAlreadyDeployedException}, {@link StreamAlreadyDeployingException}, + * {@link UnregisterAppException}, or {@link InvalidCTRLaunchRequestException} * @return the error response in JSON format with media type * application/vnd.error+json */ - @ExceptionHandler({ AppAlreadyRegisteredException.class, DuplicateStreamDefinitionException.class, - DuplicateTaskException.class, StreamAlreadyDeployedException.class, StreamAlreadyDeployingException.class, - UnregisterAppException.class, InvalidCTRLaunchRequestException.class}) + @ExceptionHandler({ + AppAlreadyRegisteredException.class, + DuplicateStreamDefinitionException.class, + DuplicateTaskException.class, + StreamAlreadyDeployedException.class, + StreamAlreadyDeployingException.class, + UnregisterAppException.class, + InvalidCTRLaunchRequestException.class + }) @ResponseStatus(HttpStatus.CONFLICT) @ResponseBody public VndErrors onConflictException(Exception e) { @@ -145,8 +151,9 @@ public VndErrors onUnprocessableEntityException(Exception e) { * {@link NoSuchTaskExecutionException}, {@link NoSuchJobExecutionException}, * {@link NoSuchJobInstanceException}, {@link NoSuchJobException}, * {@link NoSuchStepExecutionException}, - * {@link NoSuchAppException}, or - * {@link NoSuchAppInstanceException} + * {@link NoSuchAppException}, + * {@link NoSuchAppInstanceException}, or + * {@link NoSuchSchemaTargetException} * @return the error response in JSON format with media type * application/vnd.error+json */ @@ -155,7 +162,9 @@ public VndErrors onUnprocessableEntityException(Exception e) { NoSuchTaskDefinitionException.class, NoSuchTaskExecutionException.class, NoSuchJobExecutionException.class,
NoSuchJobInstanceException.class, NoSuchJobException.class, NoSuchStepExecutionException.class, NoSuchTaskBatchException.class, NoSuchAppException.class, NoSuchAppInstanceException.class, - NoSuchScheduleException.class }) + NoSuchScheduleException.class, + NoSuchSchemaTargetException.class + }) @ResponseStatus(HttpStatus.NOT_FOUND) @ResponseBody public VndErrors onNotFoundException(Exception e) { @@ -179,7 +188,7 @@ public VndErrors onNotFoundException(Exception e) { * @return the error response in JSON format with media type * application/vnd.error+json */ - @ExceptionHandler({ MissingServletRequestParameterException.class, HttpMessageNotReadableException.class, + @ExceptionHandler({ ApiNotSupportedException.class, MissingServletRequestParameterException.class, HttpMessageNotReadableException.class, UnsatisfiedServletRequestParameterException.class, MethodArgumentTypeMismatchException.class, InvalidDateRangeException.class, CannotDeleteNonParentTaskExecutionException.class, InvalidStreamDefinitionException.class, CreateScheduleException.class, OffsetOutOfBoundsException.class, diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java index 0ef1e22794..d45beb0a95 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java @@ -35,6 +35,7 @@ import org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; import org.springframework.cloud.dataflow.rest.resource.TaskToolsResource; import org.springframework.cloud.dataflow.rest.resource.about.AboutResource; import org.springframework.cloud.dataflow.server.config.features.FeaturesProperties; @@ -42,10 +43,12 @@ import org.springframework.hateoas.RepresentationModel; import org.springframework.hateoas.server.EntityLinks; import org.springframework.hateoas.server.ExposesResourceFor; -import org.springframework.hateoas.server.mvc.WebMvcLinkBuilder; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for the root resource of the Data Flow server.
* @@ -54,6 +57,7 @@ * @author Glenn Renfro * @author Mark Fisher * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @EnableConfigurationProperties(FeaturesProperties.class) @@ -87,8 +91,11 @@ public RootController(EntityLinks entityLinks) { public RootResource info() { RootResource root = new RootResource(Version.REVISION); - root.add(WebMvcLinkBuilder.linkTo(UiController.class).withRel("dashboard")); - root.add(WebMvcLinkBuilder.linkTo(AuditRecordController.class).withRel("audit-records")); + root.add(linkTo(UiController.class).withRel("dashboard")); + root.add(linkTo(AuditRecordController.class).withRel("audit-records")); + + root.add(linkTo(methodOn(SchemaController.class).getVersions()).withRel("schema/versions")); + root.add(linkTo(methodOn(SchemaController.class).getTargets()).withRel("schema/targets")); if (featuresProperties.isStreamsEnabled()) { root.add(entityLinks.linkToCollectionResource(StreamDefinitionResource.class) @@ -99,48 +106,60 @@ public RootResource info() { root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(StreamAppStatusResource.class, "{name}") .withRel("streams/validation"))); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeStreamsController.class).status(null, null, null)).withRel("runtime/streams")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeStreamsController.class).streamStatus(null, null, null)).withRel("runtime/streams/{streamNames}")); + root.add(linkTo(methodOn(RuntimeStreamsController.class).status(null, null, null)).withRel("runtime/streams")); + root.add(linkTo(methodOn(RuntimeStreamsController.class).streamStatus(null, null, null)).withRel("runtime/streams/{streamNames}")); + + root.add(linkTo(methodOn(RuntimeAppsController.class).list(null, null)).withRel("runtime/apps")); + root.add(linkTo(methodOn(RuntimeAppsController.class).display(null)).withRel("runtime/apps/{appId}")); + + root.add(linkTo(methodOn(RuntimeAppInstanceController.class).list(null, null, null)).withRel("runtime/apps/{appId}/instances")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class).display(null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppsController.class).list(null, null)).withRel("runtime/apps")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppsController.class).display(null)).withRel("runtime/apps/{appId}")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class) + .getFromActuator(null, null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}/actuator")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class) + .postToActuator(null, null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}/actuator")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppInstanceController.class).list(null, null, null)).withRel("runtime/apps/{appId}/instances")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppInstanceController.class).display(null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class) + .postToUrl(null,null, null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}/post")); - root.add(WebMvcLinkBuilder.linkTo(StreamDeploymentController.class).withRel("streams/deployments")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).info(null, 
false)).withRel("streams/deployments/{name}{?reuse-deployment-properties}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).deploy(null, null)).withRel("streams/deployments/{name}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).history(null)).withRel("streams/deployments/history/{name}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).manifest(null, null)).withRel("streams/deployments/manifest/{name}/{version}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).platformList()).withRel("streams/deployments/platform/list")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).rollback(null, null)).withRel("streams/deployments/rollback/{name}/{version}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).update(null, null)).withRel("streams/deployments/update/{name}")); + root.add(linkTo(StreamDeploymentController.class).withRel("streams/deployments")); + root.add(linkTo(methodOn(StreamDeploymentController.class).info(null, false)).withRel("streams/deployments/{name}{?reuse-deployment-properties}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).deploy(null, null)).withRel("streams/deployments/{name}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).history(null)).withRel("streams/deployments/history/{name}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).manifest(null, null)).withRel("streams/deployments/manifest/{name}/{version}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).platformList()).withRel("streams/deployments/platform/list")); + root.add(linkTo(methodOn(StreamDeploymentController.class).rollback(null, null)).withRel("streams/deployments/rollback/{name}/{version}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).update(null, null)).withRel("streams/deployments/update/{name}")); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(StreamDeploymentResource.class, "{name}").withRel("streams/deployments/deployment"))); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).scaleApplicationInstances(null, null, null, null)).withRel("streams/deployments/scale/{streamName}/{appName}/instances/{count}")); - root.add(WebMvcLinkBuilder.linkTo(StreamLogsController.class).withRel("streams/logs")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamLogsController.class).getLog(null)).withRel("streams/logs/{streamName}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamLogsController.class).getLog(null, null)).withRel("streams/logs/{streamName}/{appName}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).scaleApplicationInstances(null, null, null, null)).withRel("streams/deployments/scale/{streamName}/{appName}/instances/{count}")); + root.add(linkTo(StreamLogsController.class).withRel("streams/logs")); + root.add(linkTo(methodOn(StreamLogsController.class).getLog(null)).withRel("streams/logs/{streamName}")); + root.add(linkTo(methodOn(StreamLogsController.class).getLog(null, null)).withRel("streams/logs/{streamName}/{appName}")); } + if (featuresProperties.isTasksEnabled()) { + root.add(entityLinks.linkToCollectionResource(LauncherResource.class).withRel("tasks/platforms")); 
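
The refactor in this file standardizes on statically imported `linkTo`/`methodOn` from `WebMvcLinkBuilder` instead of fully qualified calls. A small sketch of the idiom, using controller names from this patch (the wrapper class is illustrative only, and `linkTo` must run on a request-handling thread so the base URI can be resolved):

```java
import org.springframework.cloud.dataflow.server.controller.SchemaController;
import org.springframework.cloud.dataflow.server.controller.UiController;
import org.springframework.hateoas.Link;

import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;

class LinkIdiomSketch {

	// linkTo(Controller.class) renders the class-level @RequestMapping as a URI.
	Link dashboard() {
		return linkTo(UiController.class).withRel("dashboard");
	}

	// methodOn(...) returns a recording proxy; invoking getTargets() on it captures
	// that method's mapping so linkTo(...) can render the full endpoint URI.
	Link schemaTargets() {
		return linkTo(methodOn(SchemaController.class).getTargets()).withRel("schema/targets");
	}
}
```

The static imports keep the long link-registration blocks in `RootController#info()` readable without changing behavior.
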
root.add(entityLinks.linkToCollectionResource(TaskDefinitionResource.class).withRel("tasks/definitions")); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskDefinitionResource.class, "{name}") .withRel("tasks/definitions/definition"))); root.add(entityLinks.linkToCollectionResource(TaskExecutionResource.class).withRel("tasks/executions")); + root.add(linkTo(methodOn(TaskExecutionController.class).viewByExternal(null,null)).withRel("tasks/executions/external")); + root.add(linkTo(methodOn(TaskExecutionController.class).launchBoot3(null,null,null)).withRel("tasks/executions/launch")); String taskTemplated = entityLinks.linkToCollectionResource(TaskExecutionResource.class).getHref() + "{?name}"; root.add(Link.of(taskTemplated).withRel("tasks/executions/name")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(TaskExecutionController.class) + root.add(linkTo(methodOn(TaskExecutionController.class) .getCurrentTaskExecutionsInfo()).withRel("tasks/executions/current")); - root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskExecutionResource.class, "{id}") - .withRel("tasks/executions/execution"))); + root.add(unescapeTemplateVariables(linkTo(methodOn(TaskExecutionController.class).view(null,null)).withRel("tasks/executions/execution"))); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskAppStatusResource.class, "{name}") .withRel("tasks/validation"))); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(TasksInfoController.class).getInfo(null, null)).withRel("tasks/info/executions")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(TaskLogsController.class).getLog(null, null)).withRel("tasks/logs")); - + root.add(linkTo(methodOn(TasksInfoController.class).getInfo(null, null, null)).withRel("tasks/info/executions")); + root.add(linkTo(methodOn(TaskLogsController.class).getLog(null, null, null)).withRel("tasks/logs")); + root.add(linkTo(methodOn(TaskExecutionThinController.class).listTasks(null, null)).withRel("tasks/thinexecutions")); + root.add(linkTo(methodOn(TaskExecutionThinController.class).retrieveTasksByName(null, null, null)).withRel("tasks/thinexecutions/name")); if (featuresProperties.isSchedulesEnabled()) { root.add(entityLinks.linkToCollectionResource(ScheduleInfoResource.class).withRel("tasks/schedules")); String scheduleTemplated = entityLinks.linkToCollectionResource(ScheduleInfoResource.class).getHref() diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java index 179e6d4e2f..b4a7a29095 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,55 +15,79 @@ */ package org.springframework.cloud.dataflow.server.controller; +import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.cloud.dataflow.rest.resource.AppInstanceStatusResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.server.controller.support.ControllerUtils; import org.springframework.cloud.dataflow.server.stream.StreamDeployer; import org.springframework.cloud.deployer.spi.app.AppInstanceStatus; import org.springframework.cloud.deployer.spi.app.AppStatus; import org.springframework.cloud.deployer.spi.app.DeploymentState; +import org.springframework.cloud.skipper.domain.ActuatorPostRequest; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.client.RestTemplate; + +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; /** * @author Mark Pollack + * @author Chris Bono */ @RestController @RequestMapping("/runtime/apps/{appId}/instances") @ExposesResourceFor(AppInstanceStatusResource.class) public class RuntimeAppInstanceController { + private final static Logger logger = LoggerFactory.getLogger(RuntimeAppInstanceController.class); - private static final Comparator INSTANCE_SORTER = new Comparator() { - @Override - public int compare(AppInstanceStatus i1, AppInstanceStatus i2) { - return i1.getId().compareTo(i2.getId()); - } - }; + private static final Comparator INSTANCE_SORTER = + (Comparator) (i1, i2) -> i1.getId().compareTo(i2.getId()); private final StreamDeployer streamDeployer; + private final RestTemplate restTemplate; + /** * Construct a new RuntimeAppInstanceController + * * @param streamDeployer the stream deployer to use */ public RuntimeAppInstanceController(StreamDeployer streamDeployer) { this.streamDeployer = streamDeployer; + this.restTemplate = new RestTemplate(); } @RequestMapping public PagedModel list(Pageable pageable, @PathVariable String appId, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler) { AppStatus status = streamDeployer.getAppStatus(appId); if (status.getState().equals(DeploymentState.unknown)) { throw new NoSuchAppException(appId); @@ -87,6 +111,89 @@ public 
AppInstanceStatusResource display(@PathVariable String appId, @PathVariable String instanceId) {
		return new RuntimeAppInstanceController.InstanceAssembler(status).toModel(appInstanceStatus);
	}

+	@RequestMapping(value = "/{instanceId}/actuator", method = RequestMethod.GET)
+	public ResponseEntity getFromActuator(
+			@PathVariable String appId,
+			@PathVariable String instanceId,
+			@RequestParam String endpoint) {
+		return ResponseEntity.ok(streamDeployer.getFromActuator(appId, instanceId, endpoint));
+	}
+
+	@RequestMapping(value = "/{instanceId}/actuator", method = RequestMethod.POST)
+	public ResponseEntity postToActuator(
+			@PathVariable String appId,
+			@PathVariable String instanceId,
+			@RequestBody ActuatorPostRequest actuatorPostRequest) {
+		streamDeployer.postToActuator(appId, instanceId, actuatorPostRequest);
+		return new ResponseEntity<>(HttpStatus.CREATED);
+	}
+
+	@RequestMapping(value = "/{instanceId}/post", method = RequestMethod.POST)
+	public ResponseEntity postToUrl(
+			@PathVariable String appId,
+			@PathVariable String instanceId,
+			@RequestBody String data,
+			@RequestHeader HttpHeaders headers) {
+		if (logger.isDebugEnabled()) {
+			ArgumentSanitizer sanitizer = new ArgumentSanitizer();
+			logger.debug("postToUrl:{}:{}:{}:{}", appId, instanceId, data, sanitizer.sanitizeHeaders(headers));
+		}
+		AppStatus status = streamDeployer.getAppStatus(appId);
+		if (status.getState().equals(DeploymentState.unknown)) {
+			return ResponseEntity.status(HttpStatus.NOT_FOUND).body("appId not found:" + appId);
+		}
+		AppInstanceStatus appInstanceStatus = status.getInstances().get(instanceId);
+		if (appInstanceStatus == null) {
+			return ResponseEntity.status(HttpStatus.NOT_FOUND).body("instanceId not found:" + instanceId);
+		}
+		String port = appInstanceStatus.getAttributes().get("service.external.port");
+		if (!StringUtils.hasText(port)) {
+			port = "8080";
+		}
+		// Validate the pod IP before building the URL: String.format always yields text,
+		// so checking the formatted URL for emptiness could never fail.
+		String ip = appInstanceStatus.getAttributes().get("pod.ip");
+		if (!StringUtils.hasText(ip)) {
+			return ResponseEntity.status(HttpStatus.PRECONDITION_REQUIRED).body("pod.ip not found on resource");
+		}
+		String url = String.format("http://%s:%s", ip, port);
+		// TODO determine if some headers need to be removed or added
+		HttpEntity<String> entity = new HttpEntity<>(data, headers);
+		if (logger.isDebugEnabled()) {
+			ArgumentSanitizer sanitizer = new ArgumentSanitizer();
+			logger.debug("postToUrl:{}:{}:{}:{}:{}", appId, instanceId, url, data, sanitizer.sanitizeHeaders(headers));
+		}
+		waitForUrl(url, Duration.ofSeconds(30));
+		ResponseEntity<String> response = this.restTemplate.exchange(url, HttpMethod.POST, entity, String.class);
+		return ResponseEntity.status(response.getStatusCode()).body(response.getBody());
+	}
+
+	private void waitForUrl(String uri, Duration timeout) {
+		// Poll the target with OPTIONS until it responds or the timeout elapses.
+		final long waitUntilMillis = System.currentTimeMillis() + timeout.toMillis();
+		do {
+			try {
+				Set<HttpMethod> allowed = this.restTemplate.optionsForAllow(uri);
+				if (!CollectionUtils.isEmpty(allowed)) {
+					break;
+				}
+			} catch (Throwable x) {
+				logger.trace("waitForUrl:exception:" + x);
+				// String.valueOf guards against exceptions with a null message.
+				final String message = String.valueOf(x.getMessage());
+				if (message.contains("UnknownHostException")) {
+					logger.trace("waitForUrl:retry:exception:" + x);
+					continue;
+				}
+				if (message.contains("500")) {
+					logger.trace("waitForUrl:accepted:exception:" + x);
+					break;
+				}
+			}
+			try {
+				Thread.sleep(2000L);
+			} catch (InterruptedException e) {
+				Thread.currentThread().interrupt();
+			}
+		// Loop while the deadline has not passed; the previous condition
+		// (waitUntilMillis <= now) was inverted and exited after one iteration.
+		} while (System.currentTimeMillis() < waitUntilMillis);
+	}
+
	static class InstanceAssembler extends RepresentationModelAssemblerSupport<AppInstanceStatus, AppInstanceStatusResource> {

@@ -99,7 +206,22 @@ static class
InstanceAssembler @Override public AppInstanceStatusResource toModel(AppInstanceStatus entity) { - return createModelWithId("/" + entity.getId(), entity, owningApp.getDeploymentId()); + AppInstanceStatusResource resource = createModelWithId("/" + entity.getId(), entity, owningApp.getDeploymentId()); + if (logger.isDebugEnabled()) { + ArgumentSanitizer sanitizer = new ArgumentSanitizer(); + logger.debug("toModel:{}:{}", resource.getInstanceId(), sanitizer.sanitizeProperties(resource.getAttributes())); + } + if (resource.getAttributes() != null && resource.getAttributes().containsKey("url")) { + resource.add(linkTo( + methodOn(RuntimeAppInstanceController.class).postToUrl( + owningApp.getDeploymentId(), + resource.getInstanceId(), + null, + null) + ).withRel("post")); + logger.debug("toModel:resource={}", resource.getLinks()); + } + return resource; } @Override diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java index 5bde7ff959..8efe7c89dd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java @@ -113,7 +113,7 @@ protected AppStatusResource instantiateModel(AppStatus entity) { for (AppInstanceStatus appInstanceStatus : instanceStatuses) { instanceStatusResources.add(instanceAssembler.toModel(appInstanceStatus)); } - resource.setInstances(new CollectionModel<>(instanceStatusResources)); + resource.setInstances(CollectionModel.of(instanceStatusResources)); return resource; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java index 24b38edb54..db4ceb75fb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java @@ -75,14 +75,18 @@ public RuntimeStreamsController(StreamDeployer streamDeployer) { } /** + * @param names The names of streams to include in result. * @param pageable the page * @param assembler the resource assembler * * @return a paged model for stream statuses */ @RequestMapping(method = RequestMethod.GET) - public PagedModel status(@RequestParam(value = "names", required = false) String[] names, Pageable pageable, - PagedResourcesAssembler>> assembler) { + public PagedModel status( + @RequestParam(value = "names", required = false) String[] names, + Pageable pageable, + PagedResourcesAssembler>> assembler + ) { List streamNames = (names!= null) ? Arrays.asList(names): new ArrayList<>(); if (streamNames.isEmpty()) { streamNames = this.streamDeployer.getStreams(); @@ -117,6 +121,9 @@ private List>> getStreamStatusList(String[] streamN /** * @param streamNames comma separated list of streams to retrieve the statuses for + * @param pageable Pageable required on subsequent calls. + * @param assembler The resource assembler for the results. + * @return paged results. 
*/ @RequestMapping(value = "/{streamNames}", method = RequestMethod.GET) public PagedModel streamStatus(@PathVariable("streamNames") String[] streamNames, Pageable pageable, @@ -157,7 +164,7 @@ private StreamStatusResource toStreamStatus(String streamName, List a } } } - streamStatusResource.setApplications(new CollectionModel<>(appStatusResources)); + streamStatusResource.setApplications(CollectionModel.of(appStatusResources)); return streamStatusResource; } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java new file mode 100644 index 0000000000..e7a6bd6028 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java @@ -0,0 +1,110 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.controller; + +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.cloud.dataflow.rest.resource.SchemaVersionTargetResource; +import org.springframework.cloud.dataflow.rest.resource.SchemaVersionTargetsResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.hateoas.server.RepresentationModelAssembler; +import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +/** + * Provides REST endpoint for {@link SchemaService} + * + * @author Corneil du Plessis + */ +@RestController +@RequestMapping("/schema") +public class SchemaController { + private final SchemaService schemaService; + private final SchemaVersionTargetResourceAssembler targetAssembler = new SchemaVersionTargetResourceAssembler(); + private final SchemaVersionTargetsResourceAssembler targetsAssembler = new SchemaVersionTargetsResourceAssembler(targetAssembler); + + public SchemaController(SchemaService schemaService) { + this.schemaService = schemaService; + } + + @RequestMapping(value = "/versions", method = RequestMethod.GET) + public ResponseEntity 
getVersions() { + return ResponseEntity.ok(schemaService.getVersions()); + } + + @RequestMapping(value = "/targets", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + public SchemaVersionTargetsResource getTargets() { + return targetsAssembler.toModel(schemaService.getTargets()); + } + + @RequestMapping(value = "/targets/{schemaTarget}", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + public SchemaVersionTargetResource getTarget(@PathVariable("schemaTarget") String schemaTarget) { + SchemaVersionTarget target = schemaService.getTarget(schemaTarget); + if (target == null) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return targetAssembler.toModel(target); + } + + public static class SchemaVersionTargetResourceAssembler extends RepresentationModelAssemblerSupport { + public SchemaVersionTargetResourceAssembler() { + super(SchemaController.class, SchemaVersionTargetResource.class); + } + + @Override + public SchemaVersionTargetResource toModel(SchemaVersionTarget entity) { + SchemaVersionTargetResource resource = new SchemaVersionTargetResource(entity.getName(), entity.getSchemaVersion(), entity.getTaskPrefix(), entity.getBatchPrefix(), entity.getDatasource()); + resource.add(linkTo(methodOn(SchemaController.class).getTarget(entity.getName())).withSelfRel()); + return resource; + } + } + + + static class SchemaVersionTargetsResourceAssembler extends RepresentationModelAssemblerSupport { + private final RepresentationModelAssembler assembler; + + public SchemaVersionTargetsResourceAssembler(RepresentationModelAssembler assembler) { + super(SchemaController.class, SchemaVersionTargetsResource.class); + this.assembler = assembler; + } + + @Override + public SchemaVersionTargetsResource toModel(SchemaVersionTargets entity) { + List targets = entity.getSchemas().stream() + .map(target -> assembler.toModel(target)) + .collect(Collectors.toList()); + SchemaVersionTargetsResource resource = new SchemaVersionTargetsResource(entity.getDefaultSchemaTarget(), targets); + resource.add(linkTo(methodOn(SchemaController.class).getTargets()).withSelfRel()); + return resource; + } + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java index d75f8d046f..3f68dd3edb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java @@ -20,6 +20,7 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,8 +45,10 @@ import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.RepresentationModelAssembler; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.util.Assert; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; @@ -89,16 +92,16 @@ public class 
StreamDefinitionController {

	/**
	 * Create a {@code StreamDefinitionController} that delegates to {@link StreamService}.
	 *
-	 * @param streamService the stream service to use
-	 * @param streamDefinitionService the stream definition service to use
-	 * @param appRegistryService the app registry service to use
+	 * @param streamService the stream service to use
+	 * @param streamDefinitionService the stream definition service to use
+	 * @param appRegistryService the app registry service to use
	 * @param streamDefinitionAssemblerProvider the stream definition assembler provider to use
-	 * @param appRegistrationAssemblerProvider the app registry assembler provider to use
-	 * */
+	 * @param appRegistrationAssemblerProvider the app registry assembler provider to use
+	 */
	public StreamDefinitionController(StreamService streamService, StreamDefinitionService streamDefinitionService,
-			AppRegistryService appRegistryService,
-			StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider,
-			AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) {
+			AppRegistryService appRegistryService,
+			StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider,
+			AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) {
		Assert.notNull(streamService, "StreamService must not be null");
		Assert.notNull(streamDefinitionService, "StreamDefinitionService must not be null");
		Assert.notNull(appRegistryService, "AppRegistryService must not be null");
@@ -114,40 +117,81 @@ public StreamDefinitionController(StreamService streamService, StreamDefinitionS
	/**
	 * Return a page-able list of {@link StreamDefinitionResource} defined streams.
	 *
-	 * @param pageable Pagination information
+	 * @param pageable Pagination information
	 * @param assembler assembler for {@link StreamDefinition}
-	 * @param search optional findByTaskNameContains parameter
+	 * @param search optional findByTaskNameContains parameter
	 * @return list of stream definitions
	 */
	@RequestMapping(value = "", method = RequestMethod.GET)
	@ResponseStatus(HttpStatus.OK)
-	public PagedModel list(Pageable pageable,
-			@RequestParam(required = false) String search, PagedResourcesAssembler assembler) {
+	public PagedModel list(
+			Pageable pageable,
+			@RequestParam(required = false) String search,
+			PagedResourcesAssembler<StreamDefinition> assembler
+	) {
		Page<StreamDefinition> streamDefinitions = this.streamService.findDefinitionByNameContains(pageable, search);
		return assembler.toModel(streamDefinitions,
				this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(streamDefinitions.getContent()));
	}

	/**
-	 * Create a new stream.
+	 * Create a new stream and optionally deploy it.
+	 * <p>
+	 * Differs from {@link #saveWithDeployProps} by not accepting deployment properties; it consumes
+	 * {@link MediaType#APPLICATION_FORM_URLENCODED} request content (required by the Dataflow Shell).
	 *
-	 * @param name stream name
-	 * @param dsl DSL definition for stream
-	 * @param deploy if {@code true}, the stream is deployed upon creation (default is
-	 * {@code false})
+	 * @param name stream name
+	 * @param dsl DSL definition for stream
+	 * @param deploy if {@code true}, the stream is deployed upon creation (default is
+	 * {@code false})
	 * @param description description of the stream definition
	 * @return the created stream definition
	 * @throws DuplicateStreamDefinitionException if a stream definition with the same name
-	 * already exists
-	 * @throws InvalidStreamDefinitionException if there errors in parsing the stream DSL,
-	 * resolving the name, or type of applications in the stream
+	 * already exists
+	 * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL,
+	 * resolving the name, or type of applications in the stream
+	 */
+	@RequestMapping(value = "", method = RequestMethod.POST, consumes = MediaType.APPLICATION_FORM_URLENCODED_VALUE)
+	@ResponseStatus(HttpStatus.CREATED)
+	public StreamDefinitionResource save(
+			@RequestParam("name") String name,
+			@RequestParam("definition") String dsl,
+			@RequestParam(value = "description", defaultValue = "") String description,
+			@RequestParam(value = "deploy", defaultValue = "false") boolean deploy
+	) {
+		StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy, null);
+		return ((RepresentationModelAssembler)
+				this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition))).toModel(streamDefinition);
+	}
+
+	/**
+	 * Create a new stream and optionally deploy it.
+	 * <p>
+	 * Differs from {@link #save} by accepting deployment properties and consuming
+	 * {@link MediaType#APPLICATION_JSON} request content.
+	 *
+	 * @param name stream name
+	 * @param dsl DSL definition for stream
+	 * @param deploy if {@code true}, the stream is deployed upon creation (default is
+	 * {@code false})
+	 * @param deploymentProperties the optional deployment properties to use when the stream is deployed upon creation
+	 * @param description description of the stream definition
+	 * @return the created stream definition
+	 * @throws DuplicateStreamDefinitionException if a stream definition with the same name
+	 * already exists
+	 * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL,
+	 * resolving the name, or type of applications in the stream
	 */
-	@RequestMapping(value = "", method = RequestMethod.POST)
+	@RequestMapping(value = "", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE)
	@ResponseStatus(HttpStatus.CREATED)
-	public StreamDefinitionResource save(@RequestParam("name") String name, @RequestParam("definition") String dsl,
-			@RequestParam(value = "description", defaultValue = "") String description,
-			@RequestParam(value = "deploy", defaultValue = "false") boolean deploy) {
-		StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy);
+	public StreamDefinitionResource saveWithDeployProps(
+			@RequestParam("name") String name,
+			@RequestParam("definition") String dsl,
+			@RequestParam(value = "description", defaultValue = "") String description,
+			@RequestParam(value = "deploy", defaultValue = "false") boolean deploy,
+			@RequestBody(required = false) Map<String, String> deploymentProperties
+	) {
+		StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy, deploymentProperties);
		return ((RepresentationModelAssembler)
				this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition))).toModel(streamDefinition);
	}
@@ -167,18 +211,20 @@ public void delete(@PathVariable("name") String name) {
	 * Return a list of related stream definition resources based on the given stream name.
	 * Related streams include the main stream and the tap stream(s) on the main stream.
* - * @param pageable Pagination information - * @param name the name of an existing stream definition (required) - * @param nested if should recursively findByTaskNameContains for related stream definitions + * @param pageable Pagination information + * @param name the name of an existing stream definition (required) + * @param nested if should recursively findByTaskNameContains for related stream definitions * @param assembler resource assembler for stream definition * @return a list of related stream definitions */ @RequestMapping(value = "/{name}/related", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel listRelated(Pageable pageable, + public PagedModel listRelated( + Pageable pageable, @PathVariable("name") String name, @RequestParam(value = "nested", required = false, defaultValue = "false") boolean nested, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { List result = this.streamService.findRelatedStreams(name, nested); Page page = new PageImpl<>(result, pageable, result.size()); return assembler.toModel(page, @@ -206,7 +252,7 @@ public List listApplications(@PathVariable("n StreamDefinition definition = this.streamService.findOne(name); LinkedList streamAppDefinitions = this.streamDefinitionService.getAppDefinitions(definition); List appRegistrations = new ArrayList<>(); - for (StreamAppDefinition streamAppDefinition: streamAppDefinitions) { + for (StreamAppDefinition streamAppDefinition : streamAppDefinitions) { AppRegistrationResource appRegistrationResource = this.appRegistryAssembler.toModel(this.appRegistryService.find(streamAppDefinition.getRegisteredAppName(), streamAppDefinition.getApplicationType())); appRegistrationResource.setLabel(streamAppDefinition.getName()); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java index f57ec71239..26fbef7a2f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java @@ -19,6 +19,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Map; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,6 +30,7 @@ import org.springframework.cloud.dataflow.rest.UpdateStreamRequest; import org.springframework.cloud.dataflow.rest.resource.DeploymentStateResource; import org.springframework.cloud.dataflow.rest.resource.StreamDeploymentResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.server.controller.support.ControllerUtils; import org.springframework.cloud.dataflow.server.repository.NoSuchStreamDefinitionException; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; @@ -80,16 +82,19 @@ public class StreamDeploymentController { */ private final StreamDefinitionRepository repository; + private final ArgumentSanitizer sanitizer = new ArgumentSanitizer(); + /** * Construct a new UpdatableStreamDeploymentController, given a - * {@link StreamDeploymentController} and {@link StreamService} + * {@link StreamDeploymentController} and {@link StreamService} and {@link 
StreamDefinitionService} * - * @param repository the repository this controller will use for stream CRUD operations - * @param streamService the underlying UpdatableStreamService to deploy the stream + * @param repository the repository this controller will use for stream CRUD operations + * @param streamService the underlying UpdatableStreamService to deploy the stream + * @param streamDefinitionService the StreamDefinitionService */ public StreamDeploymentController(StreamDefinitionRepository repository, - StreamService streamService, - StreamDefinitionService streamDefinitionService) { + StreamService streamService, + StreamDefinitionService streamDefinitionService) { Assert.notNull(repository, "StreamDefinitionRepository must not be null"); Assert.notNull(streamService, "StreamService must not be null"); @@ -102,9 +107,10 @@ public StreamDeploymentController(StreamDefinitionRepository repository, /** * Scale application instances in a deployed stream. + * * @param streamName the name of an existing stream definition (required) - * @param appName in stream application name to scale (required) - * @param count number of instances for the selected stream application (required) + * @param appName in stream application name to scale (required) + * @param count number of instances for the selected stream application (required) * @param properties scale deployment specific properties (optional) * @return response without a body */ @@ -115,14 +121,14 @@ public ResponseEntity scaleApplicationInstances( @PathVariable("count") Integer count, @RequestBody(required = false) Map properties) { - logger.info(String.format("Scale stream: %s, apps: %s instances to %s", streamName, appName, count)); + logger.info("Scale stream: {}, apps: {} instances to {}", streamName, appName, count); this.streamService.scaleApplicationInstances(streamName, appName, count, properties); return new ResponseEntity<>(HttpStatus.CREATED); } @RequestMapping(value = "/update/{name}", method = RequestMethod.POST) public ResponseEntity update(@PathVariable("name") String name, - @RequestBody UpdateStreamRequest updateStreamRequest) { + @RequestBody UpdateStreamRequest updateStreamRequest) { this.streamService.updateStream(name, updateStreamRequest); return new ResponseEntity<>(HttpStatus.CREATED); } @@ -135,14 +141,24 @@ public ResponseEntity rollback(@PathVariable("name") String name, @PathVar @RequestMapping(value = "/manifest/{name}/{version}", method = RequestMethod.GET) public ResponseEntity manifest(@PathVariable("name") String name, - @PathVariable("version") Integer version) { + @PathVariable("version") Integer version) { return new ResponseEntity<>(this.streamService.manifest(name, version), HttpStatus.OK); } @RequestMapping(path = "/history/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) public Collection history(@PathVariable("name") String releaseName) { - return this.streamService.history(releaseName); + return this.streamService.history(releaseName) + .stream() + .map(this::sanitizeRelease) + .collect(Collectors.toList()); + } + + private Release sanitizeRelease(Release release) { + if (release.getConfigValues() != null && StringUtils.hasText(release.getConfigValues().getRaw())) { + release.getConfigValues().setRaw(sanitizer.sanitizeJsonOrYamlString(release.getConfigValues().getRaw())); + } + return release; } @RequestMapping(path = "/platform/list", method = RequestMethod.GET) @@ -167,6 +183,7 @@ public ResponseEntity undeploy(@PathVariable("name") String name) { /** * Request un-deployment of 
all streams. + * * @return instance of {@link ResponseEntity} */ @RequestMapping(value = "", method = RequestMethod.DELETE) @@ -179,13 +196,17 @@ public ResponseEntity undeployAll() { /** * Request deployment of an existing stream definition. - * @param name the name of an existing stream definition (required) + * + * @param name the name of an existing stream definition (required) + * @param reuseDeploymentProperties Indicator to re-use deployment properties. * @return The stream deployment */ @RequestMapping(value = "/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public StreamDeploymentResource info(@PathVariable("name") String name, - @RequestParam(value = "reuse-deployment-properties", required = false) boolean reuseDeploymentProperties) { + public StreamDeploymentResource info( + @PathVariable("name") String name, + @RequestParam(value = "reuse-deployment-properties", required = false) boolean reuseDeploymentProperties + ) { StreamDefinition streamDefinition = this.repository.findById(name) .orElseThrow(() -> new NoSuchStreamDefinitionException(name)); StreamDeployment streamDeployment = this.streamService.info(name); @@ -203,14 +224,15 @@ public StreamDeploymentResource info(@PathVariable("name") String name, /** * Request deployment of an existing stream definition. - * @param name the name of an existing stream definition (required) + * + * @param name the name of an existing stream definition (required) * @param properties the deployment properties for the stream as a comma-delimited list of - * key=value pairs + * key=value pairs * @return response without a body */ @RequestMapping(value = "/{name}", method = RequestMethod.POST) public ResponseEntity deploy(@PathVariable("name") String name, - @RequestBody(required = false) Map properties) { + @RequestBody(required = false) Map properties) { this.streamService.deployStream(name, properties); return new ResponseEntity<>(HttpStatus.CREATED); } @@ -241,8 +263,7 @@ public Assembler(String dslText, String description, String status, boolean reus public StreamDeploymentResource toModel(StreamDeployment streamDeployment) { try { return createModelWithId(streamDeployment.getStreamName(), streamDeployment); - } - catch (IllegalStateException e) { + } catch (IllegalStateException e) { logger.warn("Failed to create StreamDeploymentResource. 
" + e.getMessage()); } return null; @@ -255,7 +276,7 @@ public StreamDeploymentResource instantiateModel(StreamDeployment streamDeployme (StringUtils.hasText(streamDeployment.getDeploymentProperties()) && canDisplayDeploymentProperties())) { deploymentProperties = streamDeployment.getDeploymentProperties(); } - return new StreamDeploymentResource(streamDeployment.getStreamName(), + return new StreamDeploymentResource(streamDeployment.getStreamName(), streamDefinitionService.redactDsl(new StreamDefinition(streamDeployment.getStreamName(), this.dslText)), this.description, deploymentProperties, this.status); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java index 8e1e0207bc..bf9fc16260 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,23 +20,26 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.dataflow.server.controller.assembler.TaskDefinitionAssemblerProvider; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionAwareTaskDefinition; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskQueryParamException; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.dataflow.server.service.TaskSaveService; import org.springframework.cloud.dataflow.server.service.impl.TaskServiceUtils; import org.springframework.cloud.deployer.spi.task.TaskLauncher; -import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; @@ -62,6 +65,7 @@ * @author Gunnar Hillert * @author Daniel Serleg * @author Ilayaperumal Gopinathan + * @author Chris Bono */ @RestController @RequestMapping("/tasks/definitions") @@ -74,7 +78,7 @@ public class TaskDefinitionController { private final TaskDeleteService taskDeleteService; - private final TaskExplorer explorer; + private final AggregateTaskExplorer explorer; private final 
TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider; @@ -85,15 +89,15 @@ public class TaskDefinitionController { *

	 * <li>task status checks to the provided {@link TaskLauncher}</li>
	 * </ul>
	 *
-	 * @param taskExplorer used to look up TaskExecutions.
-	 * @param repository the repository this controller will use for task CRUD operations.
-	 * @param taskSaveService handles Task saving related operations.
-	 * @param taskDeleteService handles Task deletion related operations.
+	 * @param taskExplorer used to look up TaskExecutions.
+	 * @param repository the repository this controller will use for task CRUD operations.
+	 * @param taskSaveService handles Task saving related operations.
+	 * @param taskDeleteService handles Task deletion related operations.
	 * @param taskDefinitionAssemblerProvider the task definition assembler provider to use.
	 */
-	public TaskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionRepository repository,
-			TaskSaveService taskSaveService, TaskDeleteService taskDeleteService,
-			TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) {
+	public TaskDefinitionController(AggregateTaskExplorer taskExplorer, TaskDefinitionRepository repository,
+			TaskSaveService taskSaveService, TaskDeleteService taskDeleteService,
+			TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) {
		Assert.notNull(taskExplorer, "taskExplorer must not be null");
		Assert.notNull(repository, "repository must not be null");
		Assert.notNull(taskSaveService, "taskSaveService must not be null");
@@ -109,14 +113,17 @@ public TaskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionReposit
	/**
	 * Register a task definition for future execution.
	 *
-	 * @param name name the name of the task
-	 * @param dsl DSL definition for the task
+	 * @param name the name of the task
+	 * @param dsl DSL definition for the task
	 * @param description description of the task definition
	 * @return the task definition
	 */
	@RequestMapping(value = "", method = RequestMethod.POST)
-	public TaskDefinitionResource save(@RequestParam("name") String name, @RequestParam("definition") String dsl,
-			@RequestParam(value = "description", defaultValue = "") String description) {
+	public TaskDefinitionResource save(
+			@RequestParam("name") String name,
+			@RequestParam("definition") String dsl,
+			@RequestParam(value = "description", defaultValue = "") String description
+	) {
		TaskDefinition taskDefinition = new TaskDefinition(name, dsl, description);
		taskSaveService.saveTaskDefinition(taskDefinition);
		return this.taskDefinitionAssemblerProvider.getTaskDefinitionAssembler(false).toModel(new TaskExecutionAwareTaskDefinition(taskDefinition));
@@ -125,18 +132,21 @@ public TaskDefinitionResource save(@RequestParam("name") String name, @RequestPa
	/**
	 * Delete the task from the repository so that it can no longer be executed.
	 *
-	 * @param name name of the task to be deleted
+	 * @param name name of the task to be deleted
+	 * @param cleanup optional cleanup indicator.
	 */
	@RequestMapping(value = "/{name}", method = RequestMethod.DELETE)
	@ResponseStatus(HttpStatus.OK)
-	public void destroyTask(@PathVariable("name") String name, @RequestParam(required = false) Boolean cleanup) {
+	public void destroyTask(
+			@PathVariable("name") String name,
+			@RequestParam(required = false) Boolean cleanup
+	) {
		boolean taskExecutionCleanup = (cleanup != null && cleanup) ? cleanup : false;
		this.taskDeleteService.deleteTaskDefinition(name, taskExecutionCleanup);
	}

	/**
	 * Delete all tasks from the repository.
- * */ @RequestMapping(value = "", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) @@ -147,49 +157,51 @@ public void destroyAll() { /** * Return a page-able list of {@link TaskDefinitionResource} defined tasks. * - * @param pageable page-able collection of {@code TaskDefinitionResource} - * @param search optional findByTaskNameContains parameter - * @param dslText optional findByDslText parameter - * @param manifest optional manifest flag to indicate whether the latest task execution requires task manifest update - * @param assembler assembler for the {@link TaskDefinition} + * @param pageable page-able collection of {@code TaskDefinitionResource} + * @param search optional findByTaskNameContains parameter (Deprecated: please use taskName instead) + * @param taskName optional findByTaskNameContains parameter + * @param dslText optional findByDslText parameter + * @param description optional findByDescription parameter + * @param manifest optional manifest flag to indicate whether the latest task execution requires task manifest update + * @param assembler assembler for the {@link TaskDefinition} * @return a list of task definitions */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, @RequestParam(required = false) String search, - @RequestParam(required = false) boolean manifest, @RequestParam(required = false) String dslText, - PagedResourcesAssembler assembler) { - + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) @Deprecated String search, + @RequestParam(required = false) String taskName, + @RequestParam(required = false) String description, + @RequestParam(required = false) boolean manifest, + @RequestParam(required = false) String dslText, + PagedResourcesAssembler assembler + ) { final Page taskDefinitions; - if (search != null) { - if (dslText != null) { - throw new TaskQueryParamException(new String[] {"search", "dslText"}); - } else { - taskDefinitions = repository.findByTaskNameContains(search, pageable); - } - } - else { - if (dslText != null) { - taskDefinitions = repository.findByDslTextContains(dslText, pageable); - } else { - taskDefinitions = repository.findAll(pageable); - } - } - final java.util.HashMap taskDefinitionMap = new java.util.HashMap<>(); + if (Stream.of(search, taskName, description, dslText).filter(Objects::nonNull).count() > 1L) { + throw new TaskQueryParamException(new String[]{"taskName (or search)", "description", "dslText"}); + } - for (TaskDefinition taskDefinition : taskDefinitions) { - taskDefinitionMap.put(taskDefinition.getName(), taskDefinition); + if (taskName != null) { + taskDefinitions = repository.findByTaskNameContains(taskName, pageable); + } else if (search != null) { + taskDefinitions = repository.findByTaskNameContains(search, pageable); + } else if (description != null) { + taskDefinitions = repository.findByDescriptionContains(description, pageable); + } else if (dslText != null) { + taskDefinitions = repository.findByDslTextContains(dslText, pageable); + } else { + taskDefinitions = repository.findAll(pageable); } - final List taskExecutions; + final Map taskDefinitionMap = taskDefinitions + .stream() + .collect(Collectors.toMap(TaskDefinition::getTaskName, Function.identity())); + List taskExecutions = null; if (!taskDefinitionMap.isEmpty()) { - taskExecutions = this.explorer.getLatestTaskExecutionsByTaskNames( - taskDefinitionMap.keySet().toArray(new String[taskDefinitionMap.size()])); - } - else { - 
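
The guard just added above makes `search`, `taskName`, `description`, and `dslText` mutually exclusive filters. For illustration, the same check extracted as a standalone helper (the helper name is hypothetical, not part of this patch):

```java
import java.util.Objects;
import java.util.stream.Stream;

final class QueryParamGuard {

	private QueryParamGuard() {
	}

	// Returns true when more than one of the given parameters is non-null, mirroring
	// the Stream.of(...).filter(Objects::nonNull).count() > 1L check in list() above.
	static boolean moreThanOneProvided(Object... params) {
		return Stream.of(params).filter(Objects::nonNull).count() > 1L;
	}
}
```

For example, `moreThanOneProvided(search, taskName, description, dslText)` is true for `?taskName=a&dslText=b` (which the controller rejects with `TaskQueryParamException`) and false when each filter is used on its own.
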
taskExecutions = null; + taskExecutions = this.explorer.getLatestTaskExecutionsByTaskNames(taskDefinitionMap.keySet().toArray(new String[0])); } final Page taskExecutionAwareTaskDefinitions = taskDefinitions @@ -204,12 +216,12 @@ public PagedModel list(Pageable pageable, @Req private Collection updateComposedTaskElement(Collection taskDefinitionResources, - Page taskDefinitions) { + Page taskDefinitions) { Map taskNameResources = new HashMap<>(); - for (TaskDefinitionResource taskDefinitionResource: taskDefinitionResources) { + for (TaskDefinitionResource taskDefinitionResource : taskDefinitionResources) { taskNameResources.put(taskDefinitionResource.getName(), taskDefinitionResource); } - for (TaskDefinition taskDefinition: taskDefinitions) { + for (TaskDefinition taskDefinition : taskDefinitions) { TaskParser taskParser = new TaskParser(taskDefinition.getName(), taskDefinition.getDslText(), true, true); TaskNode taskNode = taskParser.parse(); if (taskNode.isComposed()) { @@ -226,22 +238,25 @@ private Collection updateComposedTaskElement(C /** * Return a given task definition resource. * - * @param name the name of an existing task definition (required) + * @param name the name of an existing task definition (required) + * @param manifest indicator to include manifest in response. * @return the task definition */ @RequestMapping(value = "/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public TaskDefinitionResource display(@PathVariable("name") String name, @RequestParam(required = false, name = "manifest") boolean manifest) { + public TaskDefinitionResource display( + @PathVariable("name") String name, + @RequestParam(required = false, name = "manifest") boolean manifest + ) { TaskDefinition definition = this.repository.findById(name) .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); - final TaskExecution taskExecution = this.explorer.getLatestTaskExecutionForTaskName(name); + final AggregateTaskExecution taskExecution = this.explorer.getLatestTaskExecutionForTaskName(name); final RepresentationModelAssembler taskAssembler = this.taskDefinitionAssemblerProvider.getTaskDefinitionAssembler(manifest); TaskDefinitionResource taskDefinitionResource; if (taskExecution != null) { taskDefinitionResource = taskAssembler.toModel(new TaskExecutionAwareTaskDefinition(definition, taskExecution)); - } - else { + } else { taskDefinitionResource = taskAssembler.toModel(new TaskExecutionAwareTaskDefinition(definition)); } // Identify if the task definition is a composed task element @@ -262,24 +277,23 @@ private void updateComposedTaskElement(TaskDefinitionResource taskDefinitionReso } class TaskDefinitionConverter implements Function { - final Map taskExecutions; + final Map taskExecutions; - public TaskDefinitionConverter(List taskExecutions) { + public TaskDefinitionConverter(List taskExecutions) { super(); if (taskExecutions != null) { this.taskExecutions = new HashMap<>(taskExecutions.size()); - for (TaskExecution taskExecution : taskExecutions) { + for (AggregateTaskExecution taskExecution : taskExecutions) { this.taskExecutions.put(taskExecution.getTaskName(), taskExecution); } - } - else { + } else { this.taskExecutions = null; } } @Override public TaskExecutionAwareTaskDefinition apply(TaskDefinition source) { - TaskExecution lastTaskExecution = null; + AggregateTaskExecution lastTaskExecution = null; if (taskExecutions != null) { lastTaskExecution = taskExecutions.get(source.getName()); @@ -287,10 +301,11 @@ public TaskExecutionAwareTaskDefinition 
apply(TaskDefinition source) { if (lastTaskExecution != null) { return new TaskExecutionAwareTaskDefinition(source, lastTaskExecution); - } - else { + } else { return new TaskExecutionAwareTaskDefinition(source); } } - }; + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java index bce91ad291..54c059b1ca 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java @@ -19,15 +19,23 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.PlatformTaskExecutionInformation; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskManifest; @@ -35,10 +43,15 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionsInfoResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; import org.springframework.cloud.dataflow.rest.util.TaskSanitizer; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.config.DataflowAsyncAutoConfiguration; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; @@ -54,11 +67,14 @@ import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.Link; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; +import org.springframework.scheduling.annotation.Async; import
org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -66,6 +82,9 @@ import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for operations on * {@link org.springframework.cloud.task.repository.TaskExecution}. This includes @@ -77,6 +96,7 @@ * @author Christian Tzolov * @author David Turanski * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @RequestMapping("/tasks/executions") @@ -84,14 +104,18 @@ public class TaskExecutionController { private final Assembler taskAssembler = new Assembler(); - + private final LaunchResponseAssembler launcherResponseAssembler = new LaunchResponseAssembler(); private final TaskExecutionService taskExecutionService; private final TaskExecutionInfoService taskExecutionInfoService; private final TaskDeleteService taskDeleteService; - private final TaskExplorer explorer; + private final AggregateTaskExplorer explorer; + + private final AggregateExecutionSupport aggregateExecutionSupport; + + private final TaskDefinitionReader taskDefinitionReader; private final TaskJobService taskJobService; @@ -101,25 +125,36 @@ public class TaskExecutionController { private final Logger logger = LoggerFactory.getLogger(TaskExecutionController.class); + + private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); + private static final List<String> allowedSorts = Arrays.asList("TASK_EXECUTION_ID", "START_TIME", "END_TIME", "TASK_NAME", "EXIT_CODE", "EXIT_MESSAGE", "ERROR_MESSAGE", "LAST_UPDATED", "EXTERNAL_EXECUTION_ID", - "PARENT_EXECUTION_ID"); + "PARENT_EXECUTION_ID", "SCHEMA_TARGET"); /** * Creates a {@code TaskExecutionController} that retrieves Task Execution information * from the {@link TaskExplorer} * - * @param explorer the explorer this controller will use for retrieving task execution - * information. - * @param taskExecutionService used to launch tasks - * @param taskDefinitionRepository the task definition repository - * @param taskExecutionInfoService the task execution information service - * @param taskDeleteService the task deletion service - * @param taskJobService the task job service + * @param explorer the explorer this controller will use for retrieving task execution + * information. + * @param aggregateExecutionSupport provides schemaTarget for a task by name.
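The allowedSorts allow-list above now includes SCHEMA_TARGET. validatePageable(..) itself sits outside this hunk; a plausible sketch of the allow-list check it performs on an incoming Pageable (helper name and exception type are assumptions):

	import java.util.List;

	import org.springframework.data.domain.Pageable;
	import org.springframework.data.domain.Sort;

	class SortAllowListSketch {
		// Rejects any requested sort column that is not in the allow-list.
		static void validateSortColumns(Pageable pageable, List<String> allowedSorts) {
			for (Sort.Order order : pageable.getSort()) {
				if (!allowedSorts.contains(order.getProperty().toUpperCase())) {
					throw new IllegalArgumentException("Sorting is not supported for column: " + order.getProperty());
				}
			}
		}
	}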
+ * @param taskExecutionService used to launch tasks + * @param taskDefinitionRepository the task definition repository + * @param taskDefinitionReader uses task definition repository to provide Task Definition to aggregateExecutionSupport + * @param taskExecutionInfoService the task execution information service + * @param taskDeleteService the task deletion service + * @param taskJobService the task job service */ - public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskExecutionService, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionInfoService taskExecutionInfoService, - TaskDeleteService taskDeleteService, TaskJobService taskJobService) { + public TaskExecutionController(AggregateTaskExplorer explorer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskExecutionService taskExecutionService, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeleteService taskDeleteService, + TaskJobService taskJobService) { + this.taskDefinitionReader = taskDefinitionReader; Assert.notNull(explorer, "explorer must not be null"); Assert.notNull(taskExecutionService, "taskExecutionService must not be null"); Assert.notNull(taskDefinitionRepository, "taskDefinitionRepository must not be null"); @@ -128,6 +163,7 @@ public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskE Assert.notNull(taskJobService, "taskJobService must not be null"); this.taskExecutionService = taskExecutionService; this.explorer = explorer; + this.aggregateExecutionSupport = aggregateExecutionSupport; this.taskDefinitionRepository = taskDefinitionRepository; this.taskExecutionInfoService = taskExecutionInfoService; this.taskDeleteService = taskDeleteService; @@ -137,16 +173,16 @@ public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskE /** * Return a page-able list of {@link TaskExecutionResource} defined tasks. * - * @param pageable page-able collection of {@code TaskExecution}s. + * @param pageable page-able collection of {@code TaskExecution}s. * @param assembler for the {@link TaskExecution}s * @return a list of task executions */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) public PagedModel list(Pageable pageable, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler) { validatePageable(pageable); - Page taskExecutions = this.explorer.findAll(pageable); + Page taskExecutions = this.explorer.findAll(pageable); Page result = getPageableRelationships(taskExecutions, pageable); return assembler.toModel(result, this.taskAssembler); } @@ -154,19 +190,22 @@ public PagedModel list(Pageable pageable, /** * Retrieve all task executions with the task name specified * - * @param taskName name of the task - * @param pageable page-able collection of {@code TaskExecution}s. + * @param taskName name of the task + * @param pageable page-able collection of {@code TaskExecution}s. 
* @param assembler for the {@link TaskExecution}s * @return the paged list of task executions */ @RequestMapping(value = "", method = RequestMethod.GET, params = "name") @ResponseStatus(HttpStatus.OK) - public PagedModel retrieveTasksByName(@RequestParam("name") String taskName, - Pageable pageable, PagedResourcesAssembler assembler) { + public PagedModel retrieveTasksByName( + @RequestParam("name") String taskName, + Pageable pageable, + PagedResourcesAssembler assembler + ) { validatePageable(pageable); this.taskDefinitionRepository.findById(taskName) .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); - Page taskExecutions = this.explorer.findTaskExecutionsByName(taskName, pageable); + Page taskExecutions = this.explorer.findTaskExecutionsByName(taskName, pageable); Page result = getPageableRelationships(taskExecutions, pageable); return assembler.toModel(result, this.taskAssembler); } @@ -176,21 +215,41 @@ public PagedModel retrieveTasksByName(@RequestParam("name * if `spring.cloud.dataflow.task.auto-create-task-definitions` is true. * The name must be included in the path. * - * @param taskName the name of the task to be executed (required) + * @param taskName the name of the task to be executed (required) * @param properties the runtime properties for the task, as a comma-delimited list of - * key=value pairs - * @param arguments the runtime commandline arguments + * key=value pairs + * @param arguments the runtime commandline arguments * @return the taskExecutionId for the executed task */ @RequestMapping(value = "", method = RequestMethod.POST, params = "name") @ResponseStatus(HttpStatus.CREATED) - public long launch(@RequestParam("name") String taskName, + public long launch( + @RequestParam("name") String taskName, @RequestParam(required = false) String properties, - @RequestParam(required = false) String arguments) { + @RequestParam(required = false) String arguments + ) { + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + if(!schemaVersionTarget.equals(SchemaVersionTarget.defaultTarget())) { + Link link = linkTo(methodOn(TaskExecutionController.class).launchBoot3(taskName, properties, arguments)).withRel("launch"); + throw new ApiNotSupportedException(String.format("Task: %s cannot be launched for %s. 
Use %s", taskName, SchemaVersionTarget.defaultTarget().getName(), link.getHref())); + } Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); - - return this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + LaunchResponse launchResponse = this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + return launchResponse.getExecutionId(); + } + @RequestMapping(value = "/launch", method = RequestMethod.POST, params = "name") + @ResponseStatus(HttpStatus.CREATED) + public LaunchResponseResource launchBoot3( + @RequestParam("name") String taskName, + @RequestParam(required = false) String properties, + @RequestParam(required = false) String arguments + ) { + // TODO update docs and root + Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); + List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); + LaunchResponse launchResponse = this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + return this.launcherResponseAssembler.toModel(launchResponse); } /** @@ -201,18 +260,45 @@ public long launch(@RequestParam("name") String taskName, */ @RequestMapping(value = "/{id}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public TaskExecutionResource view(@PathVariable("id") long id) { - TaskExecution taskExecution = this.explorer.getTaskExecution(id); + public TaskExecutionResource view( + @PathVariable(name = "id") Long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + AggregateTaskExecution taskExecution = sanitizeTaskExecutionArguments(this.explorer.getTaskExecution(id, schemaTarget)); if (taskExecution == null) { - throw new NoSuchTaskExecutionException(id); + throw new NoSuchTaskExecutionException(id, schemaTarget); } - taskExecution = this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution); - TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(id); + TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(id, schemaTarget); taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); - List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); + List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), schemaTarget)); TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, jobExecutionIds, - taskManifest, getCtrTaskJobExecution(taskExecution, jobExecutionIds)); + taskManifest, + getCtrTaskJobExecution(taskExecution, jobExecutionIds) + ); + return this.taskAssembler.toModel(taskJobExecutionRel); + } + @RequestMapping(value = "/external/{externalExecutionId}", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + public TaskExecutionResource viewByExternal( + @PathVariable(name = "externalExecutionId") String externalExecutionId, + @RequestParam(name = "platform", required = false) String platform + ) { + AggregateTaskExecution taskExecution = sanitizeTaskExecutionArguments(this.explorer.getTaskExecutionByExternalExecutionId(externalExecutionId, platform)); + if (taskExecution == null) { + throw new NoSuchTaskExecutionException(externalExecutionId, platform); + } + TaskManifest taskManifest = 
this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); + taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), taskExecution.getSchemaTarget())); + TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel( + taskExecution, + jobExecutionIds, + taskManifest, + getCtrTaskJobExecution(taskExecution, jobExecutionIds) + ); return this.taskAssembler.toModel(taskJobExecutionRel); } @@ -225,7 +311,7 @@ public Collection getCurrentTaskExecutionsInfo() executionInformation.forEach(platformTaskExecutionInformation -> { CurrentTaskExecutionsResource currentTaskExecutionsResource = - CurrentTaskExecutionsResource.fromTaskExecutionInformation(platformTaskExecutionInformation); + CurrentTaskExecutionsResource.fromTaskExecutionInformation(platformTaskExecutionInformation); resources.add(currentTaskExecutionsResource); }); @@ -237,76 +323,113 @@ public Collection getCurrentTaskExecutionsInfo() * optional {@code actions} parameter can be used to not only clean up task execution resources, * but can also trigger the deletion of task execution and job data in the persistence store. * - * @param ids The id of the {@link TaskExecution}s to clean up + * @param ids The id of the {@link TaskExecution}s to clean up * @param actions Defaults to "CLEANUP" if not specified */ @RequestMapping(value = "/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) - public void cleanup(@PathVariable("id") Set ids, - @RequestParam(defaultValue = "CLEANUP", name="action") TaskExecutionControllerDeleteAction[] actions) { + public void cleanup( + @PathVariable("id") Set ids, + @RequestParam(defaultValue = "CLEANUP", name = "action") TaskExecutionControllerDeleteAction[] actions, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) { final Set actionsAsSet = new HashSet<>(Arrays.asList(actions)); - this.taskDeleteService.cleanupExecutions(actionsAsSet, ids); + this.taskDeleteService.cleanupExecutions(actionsAsSet, ids, schemaTarget); } /** * Cleanup resources associated with one or more task executions. The * optional {@code actions} and {@code completed} parameters can be used to not only clean up task execution resources, * but can also trigger the deletion of task execution and job data in the persistence store. + *

    + * When the {@code spring.cloud.dataflow.async.enabled} property is set to {@code true} the cleanup will happen + * asynchronously. * - * @param actions Defaults to "CLEANUP" if not specified - * @param completed Defaults to cleanup only completed task executions + * @param actions the actions to perform (default 'CLEANUP') + * @param completed whether to include only completed task executions (default false) + * @param taskName name of the task (default '') + * @param days only include tasks that have ended at least this many days ago (default null) */ @RequestMapping(method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) + @Async(DataflowAsyncAutoConfiguration.DATAFLOW_ASYNC_EXECUTOR) public void cleanupAll( - @RequestParam(defaultValue = "CLEANUP", name="action") TaskExecutionControllerDeleteAction[] actions, - @RequestParam(defaultValue = "false", name="completed") boolean completed, - @RequestParam(defaultValue = "", name="name") String taskName) { - - this.taskDeleteService.cleanupExecutions(new HashSet<>(Arrays.asList(actions)), - this.taskExecutionService.getAllTaskExecutionIds(completed, taskName)); + @RequestParam(defaultValue = "CLEANUP", name = "action") TaskExecutionControllerDeleteAction[] actions, + @RequestParam(defaultValue = "false", name = "completed") boolean completed, + @RequestParam(defaultValue = "", name = "name") String taskName, + @RequestParam(name="days", required = false) Integer days + ) { + this.taskDeleteService.cleanupExecutions(new HashSet<>(Arrays.asList(actions)), taskName, completed, days); } /** * Stop a set of task executions. * - * @param ids the ids of the {@link TaskExecution}s to stop + * @param ids the ids of the {@link TaskExecution}s to stop * @param platform the platform name */ @RequestMapping(value = "/{id}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.OK) - public void stop(@PathVariable("id") Set ids, - @RequestParam(defaultValue = "", name="platform") String platform) { - this.taskExecutionService.stopTaskExecution(ids, platform); + public void stop( + @PathVariable("id") Set ids, + @RequestParam(defaultValue = "", name = "platform") String platform, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) { + this.taskExecutionService.stopTaskExecution(ids, schemaTarget, platform); } - private Page getPageableRelationships(Page taskExecutions, Pageable pageable) { - List taskJobExecutionRels = new ArrayList<>(); - for (TaskExecution taskExecution : taskExecutions.getContent()) { - TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); - taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); - List jobExecutionIds = new ArrayList<>( - this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); - taskJobExecutionRels - .add(new TaskJobExecutionRel(this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution), - jobExecutionIds, - taskManifest, getCtrTaskJobExecution(taskExecution, jobExecutionIds))); + private Page getPageableRelationships(Page taskExecutions, Pageable pageable) { + final Map taskJobExecutionRelMap = new HashMap<>(); + Map> schemaGroups = taskExecutions.getContent() + .stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + schemaGroups.forEach((schemaTarget,aggregateTaskExecutions) -> { + Map taskMap = aggregateTaskExecutions.stream().collect(Collectors.toMap(AggregateTaskExecution::getExecutionId, Function.identity())); + Set ids = taskMap.keySet(); 
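cleanupAll above is now dispatched through @Async on the DATAFLOW_ASYNC_EXECUTOR qualifier, so bulk deletes no longer tie up the request thread. DataflowAsyncAutoConfiguration is not part of this diff; a sketch of the kind of configuration it implies, with the bean name and pool size as assumptions:

	import java.util.concurrent.Executor;

	import org.springframework.context.annotation.Bean;
	import org.springframework.context.annotation.Configuration;
	import org.springframework.scheduling.annotation.EnableAsync;
	import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

	@Configuration
	@EnableAsync
	class AsyncCleanupConfigSketch {
		static final String DATAFLOW_ASYNC_EXECUTOR = "dataflowAsyncExecutor";

		// A named executor that @Async(DATAFLOW_ASYNC_EXECUTOR) methods can target.
		@Bean(name = DATAFLOW_ASYNC_EXECUTOR)
		Executor dataflowAsyncExecutor() {
			ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
			executor.setCorePoolSize(2);
			executor.setThreadNamePrefix("dataflow-async-");
			executor.initialize();
			return executor;
		}
	}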
+ Map manifests = this.taskExecutionService.findTaskManifestByIds(ids, schemaTarget); + Map> jobExecutionIdMap = this.taskJobService.getJobExecutionIdsByTaskExecutionIds(ids, schemaTarget); + taskMap.values().forEach(taskExecution -> { + long id = taskExecution.getExecutionId(); + TaskManifest taskManifest = manifests.get(id); + if(taskManifest != null) { + taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + } + Set jobIds = jobExecutionIdMap.computeIfAbsent(id, aLong -> new HashSet<>()); + List jobExecutionIds = new ArrayList<>(jobIds); + TaskJobExecutionRel rel = new TaskJobExecutionRel(sanitizeTaskExecutionArguments(taskExecution), + jobExecutionIds, + taskManifest, getCtrTaskJobExecution(taskExecution, jobExecutionIds)); + taskJobExecutionRelMap.put(schemaTarget + ":" + id, rel); + }); + }); + List taskJobExecutionContent = taskExecutions.stream() + .map(aggregateTaskExecution -> taskJobExecutionRelMap.get(aggregateTaskExecution.getSchemaTarget() + ":" + aggregateTaskExecution.getExecutionId())) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + return new PageImpl<>(taskJobExecutionContent, pageable, taskExecutions.getTotalElements()); + } + + + private AggregateTaskExecution sanitizeTaskExecutionArguments(AggregateTaskExecution taskExecution) { + if (taskExecution != null) { + List args = taskExecution.getArguments().stream() + .map(this.argumentSanitizer::sanitize).collect(Collectors.toList()); + taskExecution.setArguments(args); } - return new PageImpl<>(taskJobExecutionRels, pageable, taskExecutions.getTotalElements()); + return taskExecution; } - private TaskJobExecution getCtrTaskJobExecution(TaskExecution taskExecution, List jobExecutionIds) { + private TaskJobExecution getCtrTaskJobExecution(AggregateTaskExecution taskExecution, List jobExecutionIds) { TaskJobExecution taskJobExecution = null; TaskDefinition taskDefinition = this.taskDefinitionRepository.findByTaskName(taskExecution.getTaskName()); - if(taskDefinition != null) { - TaskParser parser = new TaskParser(taskExecution.getTaskName(), taskDefinition.getDslText(),true, false); - if(jobExecutionIds.size() > 0 && parser.parse().isComposed()) { + if (taskDefinition != null) { + TaskParser parser = new TaskParser(taskExecution.getTaskName(), taskDefinition.getDslText(), true, false); + if (!jobExecutionIds.isEmpty() && parser.parse().isComposed()) { try { - taskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0]); - } - catch(NoSuchJobExecutionException noSuchJobExecutionException) { - this.logger.warn(String.format("Job Execution for Task Execution %s could not be found.", - taskExecution.getExecutionId()), noSuchJobExecutionException); + taskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0], taskExecution.getSchemaTarget()); + } catch (NoSuchJobExecutionException noSuchJobExecutionException) { + this.logger.warn("Job Execution for Task Execution {} could not be found.", + taskExecution.getExecutionId()); } } } @@ -339,12 +462,26 @@ public Assembler() { @Override public TaskExecutionResource toModel(TaskJobExecutionRel taskJobExecutionRel) { - return createModelWithId(taskJobExecutionRel.getTaskExecution().getExecutionId(), taskJobExecutionRel); + + TaskExecutionResource resource = new TaskExecutionResource(taskJobExecutionRel); + resource.add( + linkTo( + methodOn(TaskLogsController.class) + .getLog(resource.getExternalExecutionId(), resource.getPlatformName(), resource.getSchemaTarget()) + 
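The rewritten getPageableRelationships avoids per-row lookups: executions are grouped by schema target, and manifests plus job-execution ids are then fetched once per group via findTaskManifestByIds and getJobExecutionIdsByTaskExecutionIds. The grouping step in isolation, with simplified types:

	import java.util.Arrays;
	import java.util.List;
	import java.util.Map;
	import java.util.stream.Collectors;

	public class SchemaGroupingSketch {
		public static void main(String[] args) {
			// (schemaTarget, executionId) pairs standing in for AggregateTaskExecution.
			List<String[]> executions = Arrays.asList(
					new String[] { "boot2", "1" },
					new String[] { "boot3", "2" },
					new String[] { "boot2", "3" });
			Map<String, List<Long>> idsByTarget = executions.stream()
					.collect(Collectors.groupingBy(e -> e[0],
							Collectors.mapping(e -> Long.valueOf(e[1]), Collectors.toList())));
			System.out.println(idsByTarget); // e.g. {boot2=[1, 3], boot3=[2]}
		}
	}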
).withRel("tasks/logs") + ); + + resource.add( + linkTo( + methodOn(TaskExecutionController.class) + .view(taskJobExecutionRel.getTaskExecution().getExecutionId(), taskJobExecutionRel.getTaskExecution().getSchemaTarget()) + ).withSelfRel()); + return resource; } @Override public TaskExecutionResource instantiateModel(TaskJobExecutionRel taskJobExecutionRel) { - return new TaskExecutionResource(taskJobExecutionRel); + return toModel(taskJobExecutionRel); } } @@ -371,5 +508,16 @@ public TaskExecutionsInfoResource instantiateModel(Integer totalExecutions) { return taskExecutionsInfoResource; } } + private static class LaunchResponseAssembler extends RepresentationModelAssemblerSupport { + public LaunchResponseAssembler() { + super(TaskExecutionController.class, LaunchResponseResource.class); + } + @Override + public LaunchResponseResource toModel(LaunchResponse entity) { + LaunchResponseResource resource = new LaunchResponseResource(entity.getExecutionId(), entity.getSchemaTarget()); + resource.add(linkTo(methodOn(TaskExecutionController.class).view(entity.getExecutionId(), entity.getSchemaTarget())).withSelfRel()); + return resource; + } + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionThinController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionThinController.java new file mode 100644 index 0000000000..da44e34c76 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionThinController.java @@ -0,0 +1,96 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.controller; + +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.server.service.TaskJobService; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.PagedModel; +import org.springframework.hateoas.server.ExposesResourceFor; +import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +/** + * Provides thin task execution resources to support efficient UI paging, with embedded links to more detail. + * @author Corneil du Plessis + */ +@RestController +@RequestMapping("/tasks/thinexecutions") +@ExposesResourceFor(TaskExecutionThinResource.class) +public class TaskExecutionThinController { + + private final AggregateTaskExplorer explorer; + private final TaskDefinitionRepository taskDefinitionRepository; + private final TaskExecutionThinResourceAssembler resourceAssembler; + + private final TaskJobService taskJobService; + + public TaskExecutionThinController(AggregateTaskExplorer explorer, TaskDefinitionRepository taskDefinitionRepository, TaskJobService taskJobService) { + this.explorer = explorer; + this.taskDefinitionRepository = taskDefinitionRepository; + this.taskJobService = taskJobService; + this.resourceAssembler = new TaskExecutionThinResourceAssembler(); + } + + @GetMapping(produces = "application/json") + @ResponseStatus(HttpStatus.OK) + public PagedModel<TaskExecutionThinResource> listTasks(Pageable pageable, PagedResourcesAssembler<AggregateTaskExecution> pagedAssembler) { + Page<AggregateTaskExecution> page = explorer.findAll(pageable, true); + taskJobService.populateComposeTaskRunnerStatus(page.getContent()); + return pagedAssembler.toModel(page, resourceAssembler); + } + + @RequestMapping(value = "", method = RequestMethod.GET, params = "name") + @ResponseStatus(HttpStatus.OK) + public PagedModel<TaskExecutionThinResource> retrieveTasksByName(@RequestParam("name") String taskName, + Pageable pageable, PagedResourcesAssembler<AggregateTaskExecution> pagedAssembler) { + long tasks = this.taskDefinitionRepository.countByTaskName(taskName); + if (tasks == 0) { + throw new NoSuchTaskDefinitionException(taskName); + } + Page<AggregateTaskExecution> page = this.explorer.findTaskExecutionsByName(taskName, pageable); + taskJobService.populateComposeTaskRunnerStatus(page.getContent()); + return pagedAssembler.toModel(page, resourceAssembler); + } + + static class TaskExecutionThinResourceAssembler extends RepresentationModelAssemblerSupport<AggregateTaskExecution, TaskExecutionThinResource> { + public TaskExecutionThinResourceAssembler() {
super(TaskExecutionThinController.class, TaskExecutionThinResource.class); + } + @Override + public TaskExecutionThinResource toModel(AggregateTaskExecution entity) { + TaskExecutionThinResource resource = new TaskExecutionThinResource(entity); + resource.add(linkTo(methodOn(TaskExecutionController.class).view(resource.getExecutionId(), resource.getSchemaTarget())).withSelfRel()); + resource.add(linkTo(methodOn(TaskDefinitionController.class).display(resource.getTaskName(), true)).withRel("tasks/definitions")); + return resource; + } + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java index 4a3e37801c..cb13849e73 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java @@ -57,7 +57,11 @@ public TaskLogsController(TaskExecutionService taskExecutionService) { */ @RequestMapping(value = "/{taskExternalExecutionId}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public ResponseEntity getLog(@PathVariable String taskExternalExecutionId, @RequestParam(required = false, defaultValue = "default") String platformName) { - return new ResponseEntity<>(this.taskExecutionService.getLog(platformName, taskExternalExecutionId), HttpStatus.OK); + public ResponseEntity getLog( + @PathVariable String taskExternalExecutionId, + @RequestParam(name = "platformName", required = false, defaultValue = "default") String platformName, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) { + return new ResponseEntity<>(this.taskExecutionService.getLog(platformName, taskExternalExecutionId, schemaTarget), HttpStatus.OK); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java index cd46b21afe..fec9fc079b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java @@ -52,14 +52,17 @@ public TaskPlatformController(LauncherService launcherService) { /** * Returns the list of platform accounts available for launching tasks. * @param pageable the Pageable request - * @param assembler the paged resource assembler for Launcher + * @param schedulesEnabled optional criteria to indicate enabled schedules. 
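The assemblers above build their links with linkTo(methodOn(..)) rather than createModelWithId, so every resource carries a self link that includes the schemaTarget parameter. The pattern in isolation, against a hypothetical controller and resource:

	import org.springframework.hateoas.RepresentationModel;
	import org.springframework.web.bind.annotation.GetMapping;
	import org.springframework.web.bind.annotation.PathVariable;
	import org.springframework.web.bind.annotation.RequestParam;
	import org.springframework.web.bind.annotation.RestController;

	import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
	import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;

	class ExampleResource extends RepresentationModel<ExampleResource> {
	}

	@RestController
	class ExampleController {
		@GetMapping("/examples/{id}")
		ExampleResource view(@PathVariable Long id, @RequestParam(required = false) String schemaTarget) {
			ExampleResource resource = new ExampleResource();
			// methodOn records this method invocation; linkTo renders it as a URI,
			// so the link always tracks the actual request mapping.
			resource.add(linkTo(methodOn(ExampleController.class).view(id, schemaTarget)).withSelfRel());
			return resource;
		}
	}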
+ * @param assembler the paged resource assembler for Launcher* * @return the paged resources of type {@link LauncherResource} */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, + public PagedModel list( + Pageable pageable, @RequestParam(value = "schedulesEnabled", required = false) String schedulesEnabled, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { PagedModel result; if(StringUtils.hasText(schedulesEnabled) && schedulesEnabled.toLowerCase().equals("true")) { result = assembler.toModel(this.launcherService.getLaunchersWithSchedules(pageable), this.launcherAssembler); @@ -70,7 +73,7 @@ public PagedModel list(Pageable pageable, } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link Launcher}s to {@link LauncherResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java index 71bfa8df47..800a214eaa 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java @@ -81,9 +81,11 @@ public TaskSchedulerController(SchedulerService schedulerService) { */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, + public PagedModel list( + Pageable pageable, @RequestParam(value = "platform", required = false) String platform, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { List result = this.schedulerService.listForPlatform(platform); return assembler.toModel(new PageImpl<>(result, pageable, result.size()), taskAssembler); } @@ -97,8 +99,10 @@ public PagedModel list(Pageable pageable, */ @RequestMapping(value = "/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public ScheduleInfoResource getSchedule(@PathVariable("name") String scheduleName, - @RequestParam(value = "platform", required = false) String platform) { + public ScheduleInfoResource getSchedule( + @PathVariable("name") String scheduleName, + @RequestParam(value = "platform", required = false) String platform + ) { ScheduleInfo schedule = this.schedulerService.getSchedule(scheduleName, platform); if (schedule == null) { throw new NoSuchScheduleException(String.format("Schedule [%s] doesn't exist" , scheduleName)); @@ -116,9 +120,11 @@ public ScheduleInfoResource getSchedule(@PathVariable("name") String scheduleNam * @return a list of Schedules. 
*/ @RequestMapping("/instances/{taskDefinitionName}") - public PagedModel filteredList(@PathVariable String taskDefinitionName, + public PagedModel filteredList( + @PathVariable String taskDefinitionName, @RequestParam(value = "platform", required = false) String platform, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { List result = this.schedulerService.list(taskDefinitionName, platform); int resultSize = result.size(); Pageable pageable = PageRequest.of(0, @@ -151,11 +157,13 @@ public void deleteSchedulesforDefinition(@PathVariable String taskDefinitionName */ @RequestMapping(value = "", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) - public void save(@RequestParam("scheduleName") String scheduleName, + public void save( + @RequestParam("scheduleName") String scheduleName, @RequestParam("taskDefinitionName") String taskDefinitionName, @RequestParam String properties, @RequestParam(required = false) String arguments, - @RequestParam(value = "platform", required = false) String platform) { + @RequestParam(value = "platform", required = false) String platform + ) { Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); this.schedulerService.schedule(StringUtils.trimWhitespace(scheduleName), taskDefinitionName, @@ -170,8 +178,10 @@ public void save(@RequestParam("scheduleName") String scheduleName, */ @RequestMapping(value = "/{scheduleName}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) - public void unschedule(@PathVariable("scheduleName") String scheduleName, - @RequestParam(value = "platform", required = false) String platform) { + public void unschedule( + @PathVariable("scheduleName") String scheduleName, + @RequestParam(value = "platform", required = false) String platform + ) { schedulerService.unschedule(scheduleName, platform); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java index c8d4ebbed8..32feee37e8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java @@ -60,9 +60,12 @@ public TasksInfoController(TaskExecutionService taskExecutionService) { @RequestMapping(value= "executions", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public TaskExecutionsInfoResource getInfo(@RequestParam(required = false, defaultValue = "false", name="completed") String completed, - @RequestParam(required = false, defaultValue = "", name="name") String taskName) { - return this.taskExecutionsAssembler.toModel(this.taskExecutionService.getAllTaskExecutionsCount(Boolean.valueOf(completed), taskName)); + public TaskExecutionsInfoResource getInfo( + @RequestParam(required = false, defaultValue = "false", name="completed") String completed, + @RequestParam(required = false, defaultValue = "", name="name") String taskName, + @RequestParam(required = false, name="days") Integer days + ) { + return this.taskExecutionsAssembler.toModel(this.taskExecutionService.getAllTaskExecutionsCount(Boolean.parseBoolean(completed), taskName, days)); } /** diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java index b01ab79d2b..819f0e6417 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java @@ -22,6 +22,8 @@ import java.util.Map; import java.util.Set; +import org.slf4j.LoggerFactory; + import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; import org.springframework.cloud.dataflow.core.RelaxedNames; @@ -71,10 +73,18 @@ public Map qualifyProperties(Map properties, Res String provided = entry.getKey(); if (!allProps.contains(provided)) { List longForms = null; - for (String relaxed : new RelaxedNames(provided)) { - longForms = visible.get(relaxed); - if (longForms != null) { - break; + RelaxedNames relaxedNames = null; + try { + relaxedNames = new RelaxedNames(provided); + } catch (Exception x) { + LoggerFactory.getLogger(getClass()).error("Exception determining relaxed name for " + provided, x); + } + if(relaxedNames != null) { + for (String relaxed : relaxedNames) { + longForms = visible.get(relaxed); + if (longForms != null) { + break; + } } } if (longForms != null) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java index fb26ef836f..1dbcc113d6 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java @@ -38,11 +38,23 @@ public R toModel(AppRegistration registration) { @Override protected R instantiateModel(AppRegistration registration) { - AppRegistrationResource appRegistrationResource = (registration.getVersions() == null) ? new AppRegistrationResource(registration.getName(), registration.getType().name(), - registration.getVersion(), registration.getUri().toString(), registration.isDefaultVersion()) : - new AppRegistrationResource(registration.getName(), registration.getType().name(), - registration.getVersion(), registration.getUri().toString(), registration.isDefaultVersion(), - registration.getVersions()); + AppRegistrationResource appRegistrationResource = (registration.getVersions() == null) + ? 
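The hardened loop in qualifyProperties above treats a failure to compute relaxed name variants as "no match plus an error log" instead of failing the whole request. The same defensive shape in isolation; variantsOf(..) is a hypothetical stand-in for iterating RelaxedNames:

	import java.util.Arrays;
	import java.util.List;

	import org.slf4j.Logger;
	import org.slf4j.LoggerFactory;

	class GuardedRelaxedLookup {
		private static final Logger logger = LoggerFactory.getLogger(GuardedRelaxedLookup.class);

		// Hypothetical variant generator; RelaxedNames plays this role in the real code.
		static List<String> variantsOf(String key) {
			return Arrays.asList(key, key.replace('-', '.'), key.replace('.', '-'));
		}

		static String firstKnown(String provided, List<String> known) {
			List<String> variants = null;
			try {
				variants = variantsOf(provided);
			} catch (Exception x) {
				logger.error("Exception determining relaxed name for " + provided, x);
			}
			if (variants != null) {
				for (String variant : variants) {
					if (known.contains(variant)) {
						return variant;
					}
				}
			}
			return null;
		}
	}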
new AppRegistrationResource( + registration.getName(), + registration.getType().name(), + registration.getVersion(), + registration.getUri().toString(), + registration.getBootVersion(), + registration.isDefaultVersion() + ) : new AppRegistrationResource( + registration.getName(), + registration.getType().name(), + registration.getVersion(), + registration.getUri().toString(), + registration.getBootVersion(), + registration.isDefaultVersion(), + registration.getVersions() + ); return (R) appRegistrationResource; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java index a6d8162617..1dded4e847 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 the original author or authors. + * Copyright 2020-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,7 +15,9 @@ */ package org.springframework.cloud.dataflow.server.controller.assembler; +import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,18 +30,24 @@ import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.TaskSanitizer; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.dataflow.server.controller.TaskDefinitionController; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionAwareTaskDefinition; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.cloud.dataflow.server.service.impl.TaskServiceUtils; -import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; /** * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link TaskDefinition}s to {@link TaskDefinitionResource}s. 
+ * + * @author Ilayaperumal Gopinathan + * @author Evgeniy Bezdomnikov + * @author Glenn Renfro + * @author Chris Bono */ public class DefaultTaskDefinitionAssembler extends RepresentationModelAssemblerSupport { @@ -50,7 +58,7 @@ public class DefaultTaskDefinitionAssembler ex private final TaskJobService taskJobService; - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; private final TaskSanitizer taskSanitizer = new TaskSanitizer(); @@ -58,33 +66,42 @@ public class DefaultTaskDefinitionAssembler ex private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); - public DefaultTaskDefinitionAssembler(TaskExecutionService taskExecutionService, boolean enableManifest, - Class classType, TaskJobService taskJobService, TaskExplorer taskExplorer) { + private final AggregateExecutionSupport aggregateExecutionSupport; + + public DefaultTaskDefinitionAssembler( + TaskExecutionService taskExecutionService, + boolean enableManifest, + Class classType, + TaskJobService taskJobService, + AggregateTaskExplorer taskExplorer, + AggregateExecutionSupport aggregateExecutionSupport) { super(TaskDefinitionController.class, classType); this.taskExecutionService = taskExecutionService; this.enableManifest = enableManifest; this.taskJobService = taskJobService; this.taskExplorer = taskExplorer; + this.aggregateExecutionSupport = aggregateExecutionSupport; } TaskDefinitionResource updateTaskExecutionResource( TaskExecutionAwareTaskDefinition taskExecutionAwareTaskDefinition, TaskDefinitionResource taskDefinitionResource, boolean manifest) { - TaskExecution taskExecution = taskExecutionAwareTaskDefinition.getLatestTaskExecution(); - taskExecution = this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution); - TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); - taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + AggregateTaskExecution taskExecution = this.sanitizeTaskExecutionArguments(taskExecutionAwareTaskDefinition.getLatestTaskExecution()); + TaskManifest taskManifest = null; + if (manifest) { + taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); + taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + } TaskJobExecution composedTaskJobExecution = null; - if(taskExecution != null && taskDefinitionResource.isComposed()) { - Set jobExecutionIds = this.taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId()); + if (taskExecution != null && taskDefinitionResource.isComposed()) { + Set jobExecutionIds = this.taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); if(jobExecutionIds != null && jobExecutionIds.size() > 0) { try { - composedTaskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0]); - } - catch(NoSuchJobExecutionException noSuchJobExecutionException) { - logger.warn(String.format("Job Execution for Task Execution %s could not be found.", - taskExecution.getExecutionId()), noSuchJobExecutionException); + composedTaskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0], taskExecution.getSchemaTarget()); + } catch (NoSuchJobExecutionException noSuchJobExecutionException) { + logger.warn("Job Execution for Task Execution {} could not be found.", + taskExecution.getExecutionId()); } } } @@ -94,7 +111,12 @@ 
TaskDefinitionResource updateTaskExecutionResource( taskDefinitionResource.setLastTaskExecution(taskExecutionResource); return taskDefinitionResource; } - + private AggregateTaskExecution sanitizeTaskExecutionArguments(AggregateTaskExecution taskExecution) { + List args = taskExecution.getArguments().stream() + .map(this.argumentSanitizer::sanitize).collect(Collectors.toList()); + taskExecution.setArguments(args); + return taskExecution; + } @Override public R toModel(TaskExecutionAwareTaskDefinition taskExecutionAwareTaskDefinition) { return createModelWithId(taskExecutionAwareTaskDefinition.getTaskDefinition().getName(), diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java index eb72a5f2b0..3a8274b83b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java @@ -16,31 +16,47 @@ package org.springframework.cloud.dataflow.server.controller.assembler; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; import org.springframework.cloud.dataflow.server.service.TaskJobService; -import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.util.Assert; /** * Default REST resource assembler that returns the {@link TaskDefinitionResource} type. 
+ * * @author Ilayaperumal Gopinathan * @author Glenn Renfro */ public class DefaultTaskDefinitionAssemblerProvider implements TaskDefinitionAssemblerProvider { private final TaskExecutionService taskExecutionService; - private final TaskExplorer taskExplorer; + + private final AggregateTaskExplorer taskExplorer; + private final TaskJobService taskJobService; - public DefaultTaskDefinitionAssemblerProvider(TaskExecutionService taskExecutionService, - TaskJobService taskJobService, TaskExplorer taskExplorer) { + private final AggregateExecutionSupport aggregateExecutionSupport; + + public DefaultTaskDefinitionAssemblerProvider( + TaskExecutionService taskExecutionService, + TaskJobService taskJobService, + AggregateTaskExplorer taskExplorer, + AggregateExecutionSupport aggregateExecutionSupport + ) { + Assert.notNull(taskExecutionService, "taskExecutionService required"); + Assert.notNull(taskJobService, "taskJobService required"); + Assert.notNull(taskExplorer, "taskExplorer required"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport required"); this.taskExecutionService = taskExecutionService; this.taskJobService = taskJobService; this.taskExplorer = taskExplorer; + this.aggregateExecutionSupport = aggregateExecutionSupport; } @Override public DefaultTaskDefinitionAssembler getTaskDefinitionAssembler(boolean enableManifest) { return new DefaultTaskDefinitionAssembler(taskExecutionService, enableManifest, - TaskDefinitionResource.class, taskJobService, taskExplorer); + TaskDefinitionResource.class, taskJobService, taskExplorer, aggregateExecutionSupport); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java index c5afcc2dc9..7651ca1c78 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java @@ -20,7 +20,6 @@ import org.springframework.cloud.common.security.support.SecurityStateBean; import org.springframework.cloud.dataflow.rest.resource.security.SecurityInfoResource; import org.springframework.hateoas.server.ExposesResourceFor; -import org.springframework.hateoas.server.mvc.WebMvcLinkBuilder; import org.springframework.http.HttpStatus; import org.springframework.security.authentication.AnonymousAuthenticationToken; import org.springframework.security.core.Authentication; @@ -32,6 +31,8 @@ import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; + /** * Provides security-related meta information. 
Provides one REST endpoint at present time * {@code /security/info} that provides information such as whether security is enabled @@ -69,7 +70,7 @@ public SecurityInfoResource getSecurityInfo() { final SecurityInfoResource securityInfo = new SecurityInfoResource(); securityInfo.setAuthenticationEnabled(authenticationEnabled); - securityInfo.add(WebMvcLinkBuilder.linkTo(SecurityController.class).withSelfRel()); + securityInfo.add(linkTo(SecurityController.class).withSelfRel()); if (authenticationEnabled && SecurityContextHolder.getContext() != null) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); @@ -77,9 +78,8 @@ public SecurityInfoResource getSecurityInfo() { securityInfo.setAuthenticated(authentication.isAuthenticated()); securityInfo.setUsername(authentication.getName()); - for (Object authority : authentication.getAuthorities()) { - final GrantedAuthority grantedAuthority = (GrantedAuthority) authority; - securityInfo.addRole(grantedAuthority.getAuthority()); + for (GrantedAuthority authority : authentication.getAuthorities()) { + securityInfo.addRole(authority.getAuthority()); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java index 86874bed21..cba3c7f7f7 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.controller.support; import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.util.Assert; @@ -29,7 +30,7 @@ public class TaskExecutionAwareTaskDefinition { final TaskDefinition taskDefinition; - final TaskExecution latestTaskExecution; + final AggregateTaskExecution latestTaskExecution; /** * Initialized the {@link TaskExecutionAwareTaskDefinition} with the provided @@ -38,7 +39,7 @@ public class TaskExecutionAwareTaskDefinition { * @param taskDefinition Must not be null * @param latestTaskExecution Must not be null */ - public TaskExecutionAwareTaskDefinition(TaskDefinition taskDefinition, TaskExecution latestTaskExecution) { + public TaskExecutionAwareTaskDefinition(TaskDefinition taskDefinition, AggregateTaskExecution latestTaskExecution) { super(); Assert.notNull(taskDefinition, "The provided taskDefinition must not be null."); @@ -78,7 +79,7 @@ public TaskDefinition getTaskDefinition() { * * @return May return null */ - public TaskExecution getLatestTaskExecution() { + public AggregateTaskExecution getLatestTaskExecution() { return latestTaskExecution; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java index b25f756ea4..d5f75e36f5 100644 --- 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java @@ -15,10 +15,12 @@ */ package org.springframework.cloud.dataflow.server.controller.support; +import java.util.Set; + import org.springframework.cloud.dataflow.server.controller.TaskExecutionController; /** - * This enum is used by the {@link TaskExecutionController#cleanup(java.util.Set, TaskExecutionControllerDeleteAction[])}. + * This enum is used by the {@link TaskExecutionController#cleanup(Set, TaskExecutionControllerDeleteAction[], String)}. * * @author Gunnar Hillert * diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/AbstractDateTimeConverter.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/AbstractDateTimeConverter.java new file mode 100644 index 0000000000..057c4da0cd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/AbstractDateTimeConverter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.converter; + +import java.time.format.DateTimeFormatter; + +/** + * Base class for date/time converters. + * To be discarded when moving to Boot 3.x and the converters from org.springframework.batch.core.converter used instead. + * @author Mahmoud Ben Hassine + * @author Corneil du Plessis + * @since 2.11.2 + */ +@Deprecated +class AbstractDateTimeConverter { + + protected DateTimeFormatter instantFormatter = DateTimeFormatter.ISO_INSTANT; + + protected DateTimeFormatter localDateFormatter = DateTimeFormatter.ISO_LOCAL_DATE; + + protected DateTimeFormatter localTimeFormatter = DateTimeFormatter.ISO_LOCAL_TIME; + + protected DateTimeFormatter localDateTimeFormatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME; + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/DateToStringConverter.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/DateToStringConverter.java new file mode 100644 index 0000000000..7f77db7d41 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/DateToStringConverter.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.converter;
+
+import java.util.Date;
+
+import org.springframework.core.convert.converter.Converter;
+
+/**
+ * {@link Converter} implementation from {@link Date} to {@link String}.
+ * This converter formats dates according to the
+ * {@link java.time.format.DateTimeFormatter#ISO_INSTANT} format.
+ * To be discarded when moving to Boot 3.x and the converters from org.springframework.batch.core.converter used instead.
+ * @author Mahmoud Ben Hassine
+ * @author Corneil du Plessis
+ * @since 2.11.2
+ */
+@Deprecated
+public class DateToStringConverter extends AbstractDateTimeConverter implements Converter<Date, String> {
+
+	@Override
+	public String convert(Date source) {
+		return super.instantFormatter.format(source.toInstant());
+	}
+
+}
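Usage sketch (not part of this change): both converters are plain Spring Converter implementations, so a round-trip only needs them registered on a conversion service. The DefaultConversionService wiring below is an assumption for illustration, with StringToDateConverter taken from the file added next:

	DefaultConversionService conversionService = new DefaultConversionService();
	conversionService.addConverter(new DateToStringConverter());
	conversionService.addConverter(new StringToDateConverter());

	// Date -> ISO-8601 instant string (e.g. "2023-11-05T09:30:00Z") -> Date
	String text = conversionService.convert(new Date(), String.class);
	Date restored = conversionService.convert(text, Date.class);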
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/StringToDateConverter.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/StringToDateConverter.java
new file mode 100644
index 0000000000..9ae76fce1f
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/converter/StringToDateConverter.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.converter;
+
+import java.time.Instant;
+import java.util.Date;
+
+import org.springframework.core.convert.converter.Converter;
+
+/**
+ * {@link Converter} implementation from {@link String} to {@link Date}.
+ *
+ * This converter expects strings in the
+ * {@link java.time.format.DateTimeFormatter#ISO_INSTANT} format.
+ * To be discarded when moving to Boot 3.x and the converters from org.springframework.batch.core.converter used instead.
+ * @author Mahmoud Ben Hassine
+ * @author Corneil du Plessis
+ * @since 2.11.2
+ */
+@Deprecated
+public class StringToDateConverter extends AbstractDateTimeConverter implements Converter<String, Date> {
+
+	@Override
+	public Date convert(String source) {
+		return Date.from(super.instantFormatter.parse(source, Instant::from));
+	}
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java
new file mode 100644
index 0000000000..876555c91f
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+
+public abstract class AbstractAggregateViewMigration extends AbstractMigration {
+	public AbstractAggregateViewMigration() {
+		super(null);
+	}
+
+	public final static String CREATE_AGGREGATE_TASK_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_TASK_EXECUTION AS\n" +
+			" SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION\n" +
+			"UNION ALL\n" +
+			" SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION";
+
+	public final static String CREATE_AGGREGATE_TASK_EXECUTION_PARAMS_VIEW = "CREATE VIEW AGGREGATE_TASK_EXECUTION_PARAMS AS\n" +
+			" SELECT TASK_EXECUTION_ID, TASK_PARAM, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION_PARAMS\n" +
+			"UNION ALL\n" +
+			" SELECT TASK_EXECUTION_ID, TASK_PARAM, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION_PARAMS";
+	public final static String CREATE_AGGREGATE_JOB_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_JOB_EXECUTION AS\n" +
+			" SELECT JOB_EXECUTION_ID, VERSION, JOB_INSTANCE_ID, CREATE_TIME, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot2' AS SCHEMA_TARGET FROM BATCH_JOB_EXECUTION\n" +
+			"UNION ALL\n" +
+			" SELECT JOB_EXECUTION_ID, VERSION, JOB_INSTANCE_ID, CREATE_TIME, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_JOB_EXECUTION";
+	public final static String CREATE_AGGREGATE_JOB_INSTANCE_VIEW = "CREATE VIEW AGGREGATE_JOB_INSTANCE AS\n" +
+			" SELECT JOB_INSTANCE_ID, VERSION, JOB_NAME, JOB_KEY, 'boot2' AS SCHEMA_TARGET FROM BATCH_JOB_INSTANCE\n" +
+			"UNION ALL\n" +
+			" SELECT JOB_INSTANCE_ID, VERSION, JOB_NAME, JOB_KEY, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_JOB_INSTANCE";
+	public final static String CREATE_AGGREGATE_TASK_BATCH_VIEW = "CREATE VIEW AGGREGATE_TASK_BATCH AS\n" +
+			" SELECT TASK_EXECUTION_ID, JOB_EXECUTION_ID, 'boot2' AS SCHEMA_TARGET FROM TASK_TASK_BATCH\n" +
+			"UNION ALL\n" +
+			" SELECT TASK_EXECUTION_ID, JOB_EXECUTION_ID, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_TASK_BATCH";
+	public final static String CREATE_AGGREGATE_STEP_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_STEP_EXECUTION AS\n" +
+			" SELECT STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot2' AS SCHEMA_TARGET FROM BATCH_STEP_EXECUTION\n" +
+			"UNION ALL\n" +
+			" SELECT STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_STEP_EXECUTION";
+	@Override
+	public List<SqlCommand> getCommands() {
+		return Arrays.asList(
+				SqlCommand.from(CREATE_AGGREGATE_TASK_EXECUTION_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_TASK_EXECUTION_PARAMS_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_TASK_BATCH_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_JOB_EXECUTION_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_JOB_INSTANCE_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_STEP_EXECUTION_VIEW));
+	}
+}
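The aggregate views above only UNION the Boot 2 and Boot 3 tables and tag every row with a SCHEMA_TARGET of 'boot2' or 'boot3', so a single query can span both schema generations. A rough usage sketch, assuming a JdbcTemplate over the Data Flow DataSource (the task name is hypothetical):

	JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
	List<Map<String, Object>> executions = jdbcTemplate.queryForList(
			"SELECT TASK_EXECUTION_ID, TASK_NAME, SCHEMA_TARGET " +
			"FROM AGGREGATE_TASK_EXECUTION WHERE TASK_NAME = ?", "my-task");
	// SCHEMA_TARGET tells the caller which physical tables each row came from.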
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java
new file mode 100644
index 0000000000..77fc6bc0b4
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+
+/**
+ * Base implementation for the initial Boot 3 schema.
+ *
+ * @author Chris Bono
+ */
+public abstract class AbstractBoot3InitialSetupMigration extends AbstractMigration {
+	public AbstractBoot3InitialSetupMigration() {
+		super(null);
+	}
+
+	@Override
+	public List<SqlCommand> getCommands() {
+		List<SqlCommand> commands = new ArrayList<>();
+		commands.addAll(createTask3Tables());
+		commands.addAll(createBatch5Tables());
+		return commands;
+	}
+
+	/**
+	 * Creates the spring-cloud-task V3 tables.
+	 *
+	 * @return the list of sql commands
+	 */
+	public abstract List<SqlCommand> createTask3Tables();
+
+	/**
+	 * Creates the spring-batch V5 tables.
+	 *
+	 * @return the list of sql commands
+	 */
+	public abstract List<SqlCommand> createBatch5Tables();
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBootVersionMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBootVersionMigration.java
new file mode 100644
index 0000000000..367f80b893
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBootVersionMigration.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+
+/**
+ * The boot_version field will provide for indicating the version of
+ * Spring Boot used by the application and by implication the
+ * schema version of task and batch tables.
+ * @author Corneil du Plessis
+ * @since 2.11
+ */
+public abstract class AbstractBootVersionMigration extends AbstractMigration {
+	private static final String ADD_BOOT_VERSION = "alter table app_registration add boot_version varchar(16)";
+
+	public AbstractBootVersionMigration() {
+		super(null);
+	}
+
+	@Override
+	public List<SqlCommand> getCommands() {
+		return Collections.singletonList(SqlCommand.from(ADD_BOOT_VERSION));
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java
new file mode 100644
index 0000000000..ed9b01321c
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+
+/**
+ * Provides for converting lower-case table names to upper case so the schema works
+ * correctly with a MariaDB or MySQL installation that has case-sensitive table or column names.
+ * @author Corneil du Plessis
+ */
+public abstract class AbstractCaseSensitiveMigration extends AbstractMigration {
+	protected final static String RENAME_TASK_EXECUTION_METADATA_LC = "alter table task_execution_metadata rename to task_execution_metadata_lc";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA = "alter table task_execution_metadata_lc rename to TASK_EXECUTION_METADATA";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL = "alter table task_execution_metadata_seq rename to task_execution_metadata_seq_lc";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_TBL = "alter table task_execution_metadata_seq_lc rename to TASK_EXECUTION_METADATA_SEQ";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_LC = "alter sequence task_execution_metadata_seq rename to task_execution_metadata_seq_lc";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ = "alter sequence task_execution_metadata_seq_lc rename to TASK_EXECUTION_METADATA_SEQ";
+
+	protected final static String CREATE_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "CREATE SEQUENCE task_execution_metadata_seq_lc";
+
+	protected final static String ALTER_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "select setval(task_execution_metadata_seq_lc, (select nextval(task_execution_metadata_seq)), false)";
+
+	protected final static String DROP_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "drop sequence task_execution_metadata_seq";
+
+	protected final static String CREATE_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "create sequence TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB";
+
+	protected final static String ALTER_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "select setval(TASK_EXECUTION_METADATA_SEQ, (select nextval(task_execution_metadata_seq_lc)), false)";
+
+	protected final static String DROP_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "drop sequence task_execution_metadata_seq_lc";
+
+	public AbstractCaseSensitiveMigration() {
+		super(null);
+	}
+
+}
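These constants are only the raw rename steps; a vendor-specific migration composes the subset it needs, in order, via getCommands(). A hypothetical sketch (class name and command selection invented for illustration):

	public class V8__RenameCaseSensitiveTables extends AbstractCaseSensitiveMigration {
		@Override
		public List<SqlCommand> getCommands() {
			// Rename the lower-case metadata table out of the way, then back under its upper-case name.
			return Arrays.asList(
					SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC),
					SqlCommand.from(RENAME_TASK_EXECUTION_METADATA));
		}
	}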
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateBatchIndexesMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateBatchIndexesMigration.java
new file mode 100644
index 0000000000..3e7fa77ab6
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateBatchIndexesMigration.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+
+/**
+ * Provide indexes to improve aggregate view performance.
+ * @author Corneil du Plessis
+ */
+public abstract class AbstractCreateBatchIndexesMigration extends AbstractMigration {
+	protected static final String CREATE_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX =
+			"create index BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_IX on BATCH_STEP_EXECUTION(JOB_EXECUTION_ID)";
+	protected static final String CREATE_BOOT3_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX =
+			"create index BOOT3_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_IX on BOOT3_BATCH_STEP_EXECUTION(JOB_EXECUTION_ID)";
+	protected static final String CREATE_BOOT3_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX =
+			"create index BOOT3_TASK_TASK_BATCH_JOB_EXECUTION_ID_IX on BOOT3_TASK_TASK_BATCH(JOB_EXECUTION_ID)";
+	protected static final String CREATE_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX =
+			"create index TASK_TASK_BATCH_JOB_EXECUTION_ID_IX on TASK_TASK_BATCH(JOB_EXECUTION_ID)";
+	protected static final String CREATE_BATCH_JOB_EXECUTION_START_TIME_INDEX =
+			"create index BATCH_JOB_EXECUTION_START_TIME_IX on BATCH_JOB_EXECUTION(START_TIME)";
+	protected static final String CREATE_BOOT3_BATCH_JOB_EXECUTION_START_TIME_INDEX =
+			"create index BOOT3_BATCH_JOB_EXECUTION_START_TIME_IX on BOOT3_BATCH_JOB_EXECUTION(START_TIME)";
+
+	public AbstractCreateBatchIndexesMigration() {
+		super(null);
+	}
+
+	@Override
+	public List<SqlCommand> getCommands() {
+		return Arrays.asList(SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX),
+				SqlCommand.from(CREATE_BOOT3_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX),
+				SqlCommand.from(CREATE_BOOT3_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX),
+				SqlCommand.from(CREATE_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX),
+				SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_START_TIME_INDEX),
+				SqlCommand.from(CREATE_BOOT3_BATCH_JOB_EXECUTION_START_TIME_INDEX));
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateTaskParentIndexMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateTaskParentIndexMigration.java
new file mode 100644
index 0000000000..11d44e7d18
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateTaskParentIndexMigration.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+
+/**
+ * Provide indexes to improve performance of finding child tasks.
+ * @author Corneil du Plessis
+ */
+public abstract class AbstractCreateTaskParentIndexMigration extends AbstractMigration {
+	protected static final String CREATE_TASK_PARENT_INDEX =
+			"create index TASK_EXECUTION_PARENT_IX on TASK_EXECUTION(PARENT_EXECUTION_ID)";
+	protected static final String CREATE_BOOT3_TASK_PARENT_INDEX =
+			"create index BOOT3_TASK_EXECUTION_PARENT_IX on BOOT3_TASK_EXECUTION(PARENT_EXECUTION_ID)";
+
+	public AbstractCreateTaskParentIndexMigration() {
+		super(null);
+	}
+
+	@Override
+	public List<SqlCommand> getCommands() {
+		return Arrays.asList(
+				SqlCommand.from(CREATE_TASK_PARENT_INDEX),
+				SqlCommand.from(CREATE_BOOT3_TASK_PARENT_INDEX)
+		);
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java
index ba59ef1f0a..463c6fc242 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -18,36 +18,44 @@
 import javax.sql.DataSource;
 
 import org.flywaydb.core.api.configuration.FluentConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.springframework.boot.autoconfigure.flyway.FlywayConfigurationCustomizer;
 import org.springframework.boot.jdbc.DatabaseDriver;
+import org.springframework.cloud.dataflow.common.flyway.DatabaseDriverUtils;
 import org.springframework.cloud.dataflow.server.db.migration.db2.Db2BeforeBaseline;
+import org.springframework.cloud.dataflow.server.db.migration.mariadb.MariadbBeforeBaseline;
 import org.springframework.cloud.dataflow.server.db.migration.mysql.MysqlBeforeBaseline;
 import org.springframework.cloud.dataflow.server.db.migration.oracle.OracleBeforeBaseline;
 import org.springframework.cloud.dataflow.server.db.migration.postgresql.PostgresBeforeBaseline;
 import org.springframework.cloud.dataflow.server.db.migration.sqlserver.MsSqlBeforeBaseline;
-import org.springframework.jdbc.support.JdbcUtils;
-import org.springframework.jdbc.support.MetaDataAccessException;
 
 /**
 * Flyway {@link FlywayConfigurationCustomizer} bean customizing callbacks per
 * active db vendor.
 *
 * @author Janne Valkealahti
- *
+ * @author Chris Bono
 */
 public class DataFlowFlywayConfigurationCustomizer implements FlywayConfigurationCustomizer {
 
+	private static final Logger LOG = LoggerFactory.getLogger(DataFlowFlywayConfigurationCustomizer.class);
+
 	@Override
 	public void customize(FluentConfiguration configuration) {
-		// boot's flyway auto-config doesn't allow to define callbacks per
+		// Boot's flyway auto-config doesn't allow defining callbacks per
 		// vendor id, so essentially customizing those here.
 		DataSource dataSource = configuration.getDataSource();
-		DatabaseDriver databaseDriver = getDatabaseDriver(dataSource);
+		DatabaseDriver databaseDriver = DatabaseDriverUtils.getDatabaseDriver(dataSource);
+		LOG.info("Adding vendor specific Flyway callback for {}", databaseDriver.name());
 		if (databaseDriver == DatabaseDriver.POSTGRESQL) {
 			configuration.callbacks(new PostgresBeforeBaseline());
 		}
-		else if (databaseDriver == DatabaseDriver.MYSQL || databaseDriver == DatabaseDriver.MARIADB) {
+		else if (databaseDriver == DatabaseDriver.MARIADB) {
+			configuration.callbacks(new MariadbBeforeBaseline());
+		}
+		else if (databaseDriver == DatabaseDriver.MYSQL) {
 			configuration.callbacks(new MysqlBeforeBaseline());
 		}
 		else if (databaseDriver == DatabaseDriver.SQLSERVER) {
@@ -60,15 +68,4 @@ else if (databaseDriver == DatabaseDriver.DB2) {
 			configuration.callbacks(new Db2BeforeBaseline());
 		}
 	}
-
-	private DatabaseDriver getDatabaseDriver(DataSource dataSource) {
-		// copied from boot's flyway auto-config to get matching db vendor id
-		try {
-			String url = JdbcUtils.extractDatabaseMetaData(dataSource, "getURL");
-			return DatabaseDriver.fromJdbcUrl(url);
-		}
-		catch (MetaDataAccessException ex) {
-			throw new IllegalStateException(ex);
-		}
-	}
 }
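The next file adds DropColumnSqlCommands, a SqlCommand that consults JDBC metadata and drops a column only if it actually exists. As a sketch of the intended wiring (class and column names here are hypothetical; the db2 V9__DropJobConfigurationLocation later in this change is a real example):

	public class V99__DropLegacyColumn extends AbstractMigration {
		public V99__DropLegacyColumn() {
			// Accepts TABLE.COLUMN or SCHEMA.TABLE.COLUMN; a missing column is a no-op.
			super(Collections.singletonList(new DropColumnSqlCommands("MY_TABLE.MY_LEGACY_COLUMN")));
		}
	}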
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DropColumnSqlCommands.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DropColumnSqlCommands.java
new file mode 100644
index 0000000000..d4ea738d3b
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DropColumnSqlCommands.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Utility class that can be used in the future to drop columns.
+ * This checks for the existence of the column before dropping.
+ * @author Corneil du Plessis
+ */
+public class DropColumnSqlCommands extends SqlCommand {
+	private final static Logger logger = LoggerFactory.getLogger(DropColumnSqlCommands.class);
+
+	private final List<String> columnNames = new ArrayList<>();
+
+	public DropColumnSqlCommands(String... columnName) {
+		columnNames.addAll(Arrays.asList(columnName));
+	}
+
+	@Override
+	public void handle(JdbcTemplate jdbcTemplate, Connection connection) {
+		for (String name : columnNames) {
+			try {
+				dropColumn(jdbcTemplate, connection, name);
+			} catch (SQLException e) {
+				throw new RuntimeException(e);
+			}
+		}
+	}
+
+	@Override
+	public boolean canHandleInJdbcTemplate() {
+		return true;
+	}
+
+	protected void dropColumn(JdbcTemplate jdbcTemplate, Connection connection, String name) throws SQLException {
+		logger.debug("dropping:{}", name);
+		// Split on every '.' so SCHEMA.TABLE.COLUMN forms are supported
+		// (Spring's StringUtils.split only splits on the first occurrence).
+		String[] parts = StringUtils.delimitedListToStringArray(name, ".");
+		Assert.notNull(parts, "Expected 2 or more parts from " + name);
+		Assert.isTrue(parts.length > 1, "Expected 2 or more parts from " + name);
+		String columnName = parts[parts.length - 1];
+		String tableName = parts[parts.length - 2];
+		String schemaName = parts.length > 2 ? parts[parts.length - 3] : null;
+		logger.debug("Searching for {}.{}", tableName, columnName);
+		if (hasColumn(connection, schemaName, tableName, columnName)) {
+			String sql = String.format("alter table %s drop column %s", tableName, columnName);
+			logger.debug("Executing: {}", sql);
+			jdbcTemplate.execute(sql);
+		}
+	}
+
+	protected boolean hasColumn(Connection connection, String schemaName, String tableName, String columnName) throws SQLException {
+		String actualSchemaName = null;
+		if (StringUtils.hasText(schemaName)) {
+			try (ResultSet resultSet = connection.getMetaData().getSchemas()) {
+				while (resultSet.next()) {
+					String name = resultSet.getString("SCHEMA_NAME");
+					// determine the actual name used in specific database metadata.
+					if (name.equalsIgnoreCase(schemaName)) {
+						actualSchemaName = name;
+						break;
+					}
+				}
+			}
+		}
+		String actualTableName = tableName;
+		try (ResultSet resultSet = connection.getMetaData().getTables(null, actualSchemaName, null, new String[] {"TABLE"})) {
+			while (resultSet.next()) {
+				String name = resultSet.getString("TABLE_NAME");
+				// determine the actual name used in specific database metadata.
+				if (name.equalsIgnoreCase(tableName)) {
+					actualTableName = name;
+					break;
+				}
+			}
+		}
+		// actual names need to be same case as reported by meta data query for some databases.
+		try (ResultSet resultSet = connection.getMetaData().getColumns(null, actualSchemaName, actualTableName, null)) {
+			while (resultSet.next()) {
+				String foundColumnName = resultSet.getString("COLUMN_NAME");
+				if (foundColumnName.equalsIgnoreCase(columnName)) {
+					return true;
+				}
+			}
+		}
+		return false;
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java
new file mode 100644
index 0000000000..5301ad12b9
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration; + +import java.sql.Connection; +import java.sql.JDBCType; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLType; +import java.sql.Types; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.util.Assert; + +/** + * Provides for converting text or longtext fields in PostgreSQL to OID. + * + * @author Corneil du Plessis + */ +public class PostgreSQLTextToOID { + private final static Logger logger = LoggerFactory.getLogger(PostgreSQLTextToOID.class); + + private final static String ADD_TMP_OID_COL = "alter table %s add column %s oid"; + + private final static String ADD_TMP_TEXT_COL = "alter table %s add column %s text"; + + private final static String UPDATE_TMP_OID_COL = "update %s set %s = lo_from_bytea(0, %s::bytea), %s = null where %s in (select %s from %s where %s is null and %s is not null limit 100)"; + + private final static String UPDATE_TMP_TEXT_COL = "update %s set %s = convert_from(lo_get(cast(%s as bigint)),'UTF8'), %s = null where %s in (select %s from %s where %s is null and %s is not null limit 100)"; + + private final static String DROP_ORIGINAL_COL = "alter table %s drop column %s"; + + private final static String RENAME_TMP_COL = "alter table %s rename column %s to %s"; + + public static void convertColumnToOID(String table, String id, String column, DataSource dataSource) { + + try (Connection connection = dataSource.getConnection()) { + String tableName = table; + try(ResultSet tables = connection.getMetaData().getTables(null, null, null, null)) { + while(tables.next()) { + String name = tables.getString("TABLE_NAME"); + if(name.equalsIgnoreCase(table)) { + tableName = name; + break; + } + } + } + logger.debug("searching:{}", tableName); + try (ResultSet resultSet = connection.getMetaData().getColumns(null, null, tableName, null)) { + int count = 0; + while (resultSet.next()) { + String columnName = resultSet.getString("COLUMN_NAME"); + if(columnName.equalsIgnoreCase(column)) { + count++; + int dataType = resultSet.getInt("DATA_TYPE"); + logger.info("Found {}:{}:{}", table, column, JDBCType.valueOf(dataType)); + if (dataType == Types.BIGINT) { + return; + } + } + } + Assert.isTrue(count > 0, "Cannot find " + table + ":" + column); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + JdbcTemplate template = new JdbcTemplate(dataSource); + final String tmp_col = column + "_tmp"; + String sqlTmp = String.format(ADD_TMP_OID_COL, table, tmp_col); + logger.debug("Executing:{}", sqlTmp); + template.update(sqlTmp); + int total = 0; + do { + String sql = String.format(UPDATE_TMP_OID_COL, table, tmp_col, column, column, id, id, table, tmp_col, column); + logger.debug("Executing:{}", sql); + int count = template.update(sql); + total += count; + if (count <= 0) { + logger.info("Updated {} rows of {} in {}", total, column, table); + break; + } + } while (true); + String sqlDrop = String.format(DROP_ORIGINAL_COL, table, column); 
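+		// At this point the batched loop above (at most 100 rows per pass) has copied
+		// every remaining value into the temp OID column; the original text column is
+		// now dropped and the temp column renamed into its place.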
+ logger.debug("Executing:{}", sqlDrop); + template.update(sqlDrop); + String sqlRename = String.format(RENAME_TMP_COL, table, tmp_col, column); + logger.debug("Executing:{}", sqlRename); + template.update(sqlRename); + } + + public static void convertColumnFromOID(String table, String id, String column, DataSource dataSource) { + try (Connection connection = dataSource.getConnection()) { + String tableName = table; + try(ResultSet tables = connection.getMetaData().getTables(null, null, null, null)) { + while(tables.next()) { + String name = tables.getString("TABLE_NAME"); + if(name.equalsIgnoreCase(table)) { + tableName = name; + break; + } + } + } + logger.debug("searching:{}", tableName); + try (ResultSet resultSet = connection.getMetaData().getColumns(null, null, tableName, null)) { + int count = 0; + while (resultSet.next()) { + String columnName = resultSet.getString("COLUMN_NAME"); + if(columnName.equalsIgnoreCase(column)) { + count++; + int dataType = resultSet.getInt("DATA_TYPE"); + logger.info("Found {}:{}:{}", table, column, JDBCType.valueOf(dataType)); + if (dataType != Types.BIGINT) { + return; + } + } + } + Assert.isTrue(count > 0, "Cannot find " + table + ":" + column); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + JdbcTemplate template = new JdbcTemplate(dataSource); + final String tmp_col = column + "_tmp"; + String sqlTmp = String.format(ADD_TMP_TEXT_COL, table, tmp_col); + logger.debug("Executing:{}", sqlTmp); + template.update(sqlTmp); + int total = 0; + do { + String sql = String.format(UPDATE_TMP_TEXT_COL, table, tmp_col, column, column, id, id, table, tmp_col, column); + logger.debug("Executing:{}", sql); + int count = template.update(sql); + total += count; + if (count <= 0) { + logger.info("Updated {} rows of {} in {}", total, column, table); + break; + } + } while (true); + String sqlDrop = String.format(DROP_ORIGINAL_COL, table, column); + logger.debug("Executing:{}", sqlDrop); + template.update(sqlDrop); + String sqlRename = String.format(RENAME_TMP_COL, table, tmp_col, column); + logger.debug("Executing:{}", sqlRename); + template.update(sqlRename); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V10__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V10__CreateBatchIndexes.java new file mode 100644 index 0000000000..970fa855f3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V10__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V10__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V11__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V11__CreateTaskParentIndex.java new file mode 100644 index 0000000000..6efeb5db52 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V11__CreateTaskParentIndex.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V11__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java index 7487ee68b5..b2f476bd44 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java @@ -111,7 +111,7 @@ public class V1__Initial_Setup extends AbstractInitialSetupMigration { ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE SEQUENCE TASK_SEQ AS BIGINT START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + "CREATE SEQUENCE TASK_SEQ AS BIGINT START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..17718d3fc0 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..f711020729 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field will provide for indicating the version of + * Spring Boot used by the application and by implication the + * schema version of task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..23071e63e3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,204 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.db2;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration;
+
+/**
+ * Adds the spring-cloud-task V3
+ * and spring-batch V5
+ * schemas to support Boot3 compatibility.
+ * <p>
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ AS BIGINT START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST CLOB,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE 
BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" +
+			" JOB_EXECUTION_ID BIGINT NOT NULL,\n" +
+			" PARAMETER_NAME VARCHAR(100) NOT NULL,\n" +
+			" PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" +
+			" PARAMETER_VALUE VARCHAR(2500),\n" +
+			" IDENTIFYING CHAR(1) NOT NULL,\n" +
+			" constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" +
+			" references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" +
+			")";
+
+	private final static String CREATE_BATCH_STEP_EXECUTION_TABLE =
+			"CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" +
+			" STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" +
+			" VERSION BIGINT NOT NULL,\n" +
+			" STEP_NAME VARCHAR(100) NOT NULL,\n" +
+			" JOB_EXECUTION_ID BIGINT NOT NULL,\n" +
+			" CREATE_TIME TIMESTAMP(9) NOT NULL,\n" +
+			" START_TIME TIMESTAMP(9) DEFAULT NULL,\n" +
+			" END_TIME TIMESTAMP(9) DEFAULT NULL,\n" +
+			" STATUS VARCHAR(10),\n" +
+			" COMMIT_COUNT BIGINT,\n" +
+			" READ_COUNT BIGINT,\n" +
+			" FILTER_COUNT BIGINT,\n" +
+			" WRITE_COUNT BIGINT,\n" +
+			" READ_SKIP_COUNT BIGINT,\n" +
+			" WRITE_SKIP_COUNT BIGINT,\n" +
+			" PROCESS_SKIP_COUNT BIGINT,\n" +
+			" ROLLBACK_COUNT BIGINT,\n" +
+			" EXIT_CODE VARCHAR(2500),\n" +
+			" EXIT_MESSAGE VARCHAR(2500),\n" +
+			" LAST_UPDATED TIMESTAMP(9),\n" +
+			" constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" +
+			" references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" +
+			")";
+
+	private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE =
+			"CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" +
+			" STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" +
+			" SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" +
+			" SERIALIZED_CONTEXT CLOB,\n" +
+			" constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" +
+			" references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" +
+			")";
+
+	private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE =
+			"CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" +
+			" JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" +
+			" SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" +
+			" SERIALIZED_CONTEXT CLOB,\n" +
+			" constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" +
+			" references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" +
+			")";
+
+	private final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE =
+			"CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE";
+
+	private final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE =
+			"CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE";
+
+	private final static String CREATE_BATCH_JOB_SEQUENCE =
+			"CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE";
+
+	@Override
+	public List<SqlCommand> createTask3Tables() {
+		return Arrays.asList(
+				SqlCommand.from(CREATE_TASK_EXECUTION_TABLE),
+				SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE),
+				SqlCommand.from(CREATE_TASK_TASK_BATCH),
+				SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE),
+				SqlCommand.from(CREATE_TASK_LOCK_TABLE),
+				SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE),
+				SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)
+		);
+	}
+
+	@Override
+	public List<SqlCommand> createBatch5Tables() {
+		return Arrays.asList(
+				SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE),
+				SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE),
+				SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE),
+				SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE),
+				SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE),
+				SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE),
+
SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java new file mode 100644 index 0000000000..bb0b309056 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V9__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V9__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..4458973465 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V9__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +import java.util.Collections; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. + * @author Corneil du Plessis + */ +public class V9__DropJobConfigurationLocation extends AbstractMigration { + public V9__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/MariadbBeforeBaseline.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/MariadbBeforeBaseline.java new file mode 100644 index 0000000000..37427e2ada --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/MariadbBeforeBaseline.java @@ -0,0 +1,198 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBaselineCallback; + +/** + * Baselining schema setup for {@code mariadb}. + * + * @author Janne Valkealahti + * + */ +public class MariadbBeforeBaseline extends AbstractBaselineCallback { + + public final static String DROP_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX = + "drop index AUDIT_RECORDS_AUDIT_ACTION_IDX on AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX = + "drop index AUDIT_RECORDS_AUDIT_OPERATION_IDX on AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX = + "drop index AUDIT_RECORDS_CORRELATION_ID_IDX on AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_CREATED_ON_IDX_INDEX = + "drop index AUDIT_RECORDS_CREATED_ON_IDX on AUDIT_RECORDS"; + + public final static String CREATE_APP_REGISTRATION_TMP_TABLE = + V1__Initial_Setup.CREATE_APP_REGISTRATION_TABLE.replaceFirst("app_registration", "app_registration_tmp"); + + public final static String INSERT_APP_REGISTRATION_DATA = + "insert into\n" + + " app_registration_tmp (id, object_version, default_version, metadata_uri, name, type, uri, version) \n" + + " select id, object_Version, default_Version, metadata_Uri, name, type, uri, version\n" + + " from APP_REGISTRATION"; + + public final static String DROP_APP_REGISTRATION_TABLE = + "drop table APP_REGISTRATION"; + + public final static String RENAME_APP_REGISTRATION_TMP_TABLE = + "alter table app_registration_tmp rename to app_registration"; + + public final static String CREATE_STREAM_DEFINITIONS_TMP_TABLE = + V1__Initial_Setup.CREATE_STREAM_DEFINITIONS_TABLE.replaceFirst("stream_definitions", "stream_definitions_tmp"); + + public final static String INSERT_STREAM_DEFINITIONS_DATA = + "insert into\n" + + " stream_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from STREAM_DEFINITIONS"; + + public final static String DROP_STREAM_DEFINITIONS_TABLE = + "drop table STREAM_DEFINITIONS"; + + public final static String RENAME_STREAM_DEFINITIONS_TMP_TABLE = + "alter table stream_definitions_tmp rename to stream_definitions"; + + public final static String CREATE_TASK_DEFINITIONS_TMP_TABLE = + V1__Initial_Setup.CREATE_TASK_DEFINITIONS_TABLE.replaceFirst("task_definitions", "task_definitions_tmp"); + + public final static String INSERT_TASK_DEFINITIONS_DATA = + "insert into\n" + + " task_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from TASK_DEFINITIONS"; + + public final static String DROP_TASK_DEFINITIONS_TABLE = + "drop table TASK_DEFINITIONS"; + + public final static String RENAME_TASK_DEFINITIONS_TMP_TABLE = + "alter table task_definitions_tmp rename to task_definitions"; + + public final static String CREATE_AUDIT_RECORDS_TMP_TABLE = +
V1__Initial_Setup.CREATE_AUDIT_RECORDS_TABLE.replaceFirst("audit_records", "audit_records_tmp"); + + public final static String INSERT_AUDIT_RECORDS_DATA = + "insert into\n" + + " audit_records_tmp (id, audit_action, audit_data, audit_operation, correlation_id, created_by, created_on)\n" + + " select id, audit_Action, audit_data, audit_Operation, correlation_id, created_by, created_On\n" + + " from AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_TABLE = + "drop table AUDIT_RECORDS"; + + public final static String RENAME_AUDIT_RECORDS_TMP_TABLE = + "alter table audit_records_tmp rename to audit_records"; + + public final static String CREATE_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX = + "create index audit_records_audit_action_idx on audit_records (audit_action)"; + + public final static String CREATE_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX = + "create index audit_records_audit_operation_idx on audit_records (audit_operation)"; + + public final static String CREATE_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX = + "create index audit_records_correlation_id_idx on audit_records (correlation_id)"; + + public final static String CREATE_AUDIT_RECORDS_CREATED_ON_IDX_INDEX = + "create index audit_records_created_on_idx on audit_records (created_on)"; + + /** + * Instantiates a new mariadb before baseline. + */ + public MariadbBeforeBaseline() { + super(new V1__Initial_Setup()); + } + + @Override + public List dropIndexes() { + return Arrays.asList( + SqlCommand.from(DROP_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX), + SqlCommand.from(DROP_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX), + SqlCommand.from(DROP_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX), + SqlCommand.from(DROP_AUDIT_RECORDS_CREATED_ON_IDX_INDEX)); + } + + @Override + public List changeAppRegistrationTable() { + return Arrays.asList( + SqlCommand.from(CREATE_APP_REGISTRATION_TMP_TABLE), + SqlCommand.from(INSERT_APP_REGISTRATION_DATA), + SqlCommand.from(DROP_APP_REGISTRATION_TABLE), + SqlCommand.from(RENAME_APP_REGISTRATION_TMP_TABLE)); + } + + @Override + public List changeUriRegistryTable() { + // Other db types support migrating app_registration + // and hibernate_sequence from the dataflow 1.7.x line. As + // mariadb is a newly supported db type (previously used via + // mysql), we should not need these migrations.
+ return Collections.emptyList(); + } + + @Override + public List changeStreamDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_STREAM_DEFINITIONS_TMP_TABLE), + SqlCommand.from(INSERT_STREAM_DEFINITIONS_DATA), + SqlCommand.from(DROP_STREAM_DEFINITIONS_TABLE), + SqlCommand.from(RENAME_STREAM_DEFINITIONS_TMP_TABLE)); + } + + @Override + public List changeTaskDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_DEFINITIONS_TMP_TABLE), + SqlCommand.from(INSERT_TASK_DEFINITIONS_DATA), + SqlCommand.from(DROP_TASK_DEFINITIONS_TABLE), + SqlCommand.from(RENAME_TASK_DEFINITIONS_TMP_TABLE)); + } + + @Override + public List changeAuditRecordsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_AUDIT_RECORDS_TMP_TABLE), + SqlCommand.from(INSERT_AUDIT_RECORDS_DATA), + SqlCommand.from(DROP_AUDIT_RECORDS_TABLE), + SqlCommand.from(RENAME_AUDIT_RECORDS_TMP_TABLE)); + } + + @Override + public List createIndexes() { + return Arrays.asList( + SqlCommand.from(CREATE_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX), + SqlCommand.from(CREATE_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX), + SqlCommand.from(CREATE_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX), + SqlCommand.from(CREATE_AUDIT_RECORDS_CREATED_ON_IDX_INDEX)); + } + + @Override + public List createTaskLockTable() { + return Arrays.asList( + SqlCommand.from(V1__Initial_Setup.CREATE_TASK_LOCK_TABLE)); + } + + @Override + public List createTaskDeploymentTable() { + return Arrays.asList(SqlCommand.from( + V1__Initial_Setup.CREATE_TASK_DEPLOYMENT_TABLE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V10__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V10__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..985bfd3141 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V10__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. 
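+ * The column no longer exists in the Spring Batch 5 schema; the resulting SQL is roughly {@code ALTER TABLE BOOT3_BATCH_JOB_EXECUTION DROP COLUMN JOB_CONFIGURATION_LOCATION}.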
+ * @author Corneil du Plessis + */ +public class V10__DropJobConfigurationLocation extends AbstractMigration { + public V10__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V11__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V11__CreateBatchIndexes.java new file mode 100644 index 0000000000..6bddbfd404 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V11__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V11__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V12__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V12__CreateTaskParentIndex.java new file mode 100644 index 0000000000..3d48380d07 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V12__CreateTaskParentIndex.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V12__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V1__Initial_Setup.java new file mode 100644 index 0000000000..5db729f703 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V1__Initial_Setup.java @@ -0,0 +1,313 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractInitialSetupMigration; + +/** + * Initial schema setup for {@code mariadb}. + * + * @author Janne Valkealahti + * + */ +public class V1__Initial_Setup extends AbstractInitialSetupMigration { + + public final static String CREATE_HIBERNATE_SEQUENCE = + "create sequence if not exists hibernate_sequence start 1 increment 1"; + + public final static String CREATE_APP_REGISTRATION_TABLE = + "create table app_registration (\n" + + " id bigint not null,\n" + + " object_version bigint,\n" + + " default_version bit,\n" + + " metadata_uri longtext,\n" + + " name varchar(255),\n" + + " type integer,\n" + + " uri longtext,\n" + + " version varchar(255),\n" + + " primary key (id)\n" + + ")"; + + public final static String CREATE_AUDIT_RECORDS_TABLE = + "create table audit_records (\n" + + " id bigint not null,\n" + + " audit_action bigint,\n" + + " audit_data longtext,\n" + + " audit_operation bigint,\n" + + " correlation_id varchar(255),\n" + + " created_by varchar(255),\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; + + public final static String CREATE_STREAM_DEFINITIONS_TABLE = + "create table stream_definitions (\n" + + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; + + public final static String CREATE_TASK_DEFINITIONS_TABLE = + "create table task_definitions (\n" + + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; + + public final static String CREATE_TASK_DEPLOYMENT_TABLE = + "create table task_deployment (\n" + + " id bigint not null,\n" + + " object_version bigint,\n" + + " task_deployment_id varchar(255) not null,\n" + + " task_definition_name varchar(255) not null,\n" + + " platform_name varchar(255) not null,\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE TABLE TASK_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " 
constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + private final static String INSERT_TASK_SEQ_SEQUENCE = + "INSERT INTO TASK_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME NOT NULL,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " TYPE_CD VARCHAR(6) NOT NULL,\n" + + " KEY_NAME VARCHAR(100) NOT NULL,\n" + + " STRING_VAL VARCHAR(250),\n" + + " DATE_VAL DATETIME DEFAULT NULL,\n" + + " LONG_VAL BIGINT,\n" + + " DOUBLE_VAL DOUBLE PRECISION,\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " START_TIME DATETIME NOT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " 
SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE TABLE BATCH_STEP_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_BATCH_STEP_EXECUTION_SEQUENCE = + "INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_STEP_EXECUTION_SEQ)"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE TABLE BATCH_JOB_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_BATCH_JOB_EXECUTION_SEQUENCE = + "INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_EXECUTION_SEQ)"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE TABLE BATCH_JOB_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_BATCH_JOB_SEQUENCE = + "INSERT INTO BATCH_JOB_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_SEQ)"; + + public V1__Initial_Setup() { + super(null); + } + + @Override + public List createHibernateSequence() { + return Arrays.asList( + SqlCommand.from(CREATE_HIBERNATE_SEQUENCE)); + } + + @Override + public List createAppRegistrationTable() { + return Arrays.asList( + SqlCommand.from(CREATE_APP_REGISTRATION_TABLE)); + } + + @Override + public List createTaskDeploymentTable() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_DEPLOYMENT_TABLE)); + } + + @Override + public List createAuditRecordsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_AUDIT_RECORDS_TABLE)); + } + + @Override + public List createStreamDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_STREAM_DEFINITIONS_TABLE)); + } + + @Override + public List createTaskDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_DEFINITIONS_TABLE)); + } + + @Override + public List createTaskTables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(INSERT_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + } + + @Override + public List createBatchTables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(INSERT_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(INSERT_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE), + 
SqlCommand.from(INSERT_BATCH_JOB_SEQUENCE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V2__Add_Descriptions_And_OriginalDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V2__Add_Descriptions_And_OriginalDefinition.java new file mode 100644 index 0000000000..7d2be50085 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V2__Add_Descriptions_And_OriginalDefinition.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds a description column to the stream_definitions and task_definitions + * tables and an original_definition column to stream_definitions.
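+ * It also creates the task_execution_metadata table and its backing task_execution_metadata_seq sequence table.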
+ * + * @author Daniel Serleg + * @author Ilayaperumal Gopinathan + * @author Michael Minella + * + * @since 2.3 + */ +public class V2__Add_Descriptions_And_OriginalDefinition extends BaseJavaMigration { + + public final static String ALTER_STREAM_DEFINITION_TABLE_DESC = "alter table stream_definitions add description varchar(255)"; + + public final static String ALTER_STREAM_DEFINITION_TABLE_ORIG_DEF = "alter table stream_definitions add original_definition longtext"; + + public final static String ALTER_TASK_DEFINITION_TABLE = "" + + "alter table task_definitions add description varchar(255)"; + + public final static String UPDATE_STREAM_DEFINITION_TABLE_ORIG_DEF = "update stream_definitions set original_definition=definition"; + + public final static String CREATE_TASK_METADATA_TABLE = + "CREATE TABLE task_execution_metadata (\n" + + " id BIGINT NOT NULL,\n" + + " task_execution_id BIGINT NOT NULL,\n" + + " task_execution_manifest LONGTEXT,\n" + + " primary key (id),\n" + + " CONSTRAINT TASK_METADATA_FK FOREIGN KEY (task_execution_id)\n" + + " REFERENCES TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_METADATA_SEQUENCE = + "CREATE TABLE task_execution_metadata_seq (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_TASK_METADATA_SEQUENCE = + "INSERT INTO task_execution_metadata_seq (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from task_execution_metadata_seq)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ALTER_STREAM_DEFINITION_TABLE_DESC), + SqlCommand.from(ALTER_STREAM_DEFINITION_TABLE_ORIG_DEF), + SqlCommand.from(ALTER_TASK_DEFINITION_TABLE), + SqlCommand.from(UPDATE_STREAM_DEFINITION_TABLE_ORIG_DEF), + SqlCommand.from(CREATE_TASK_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_METADATA_SEQUENCE), + SqlCommand.from(INSERT_TASK_METADATA_SEQUENCE))); + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V3__Add_Platform_To_AuditRecords.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V3__Add_Platform_To_AuditRecords.java new file mode 100644 index 0000000000..ef467e5285 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V3__Add_Platform_To_AuditRecords.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds a platform_name column to audit_records. + * + * @author Daniel Serleg + * + * @since 2.4 + */ +public class V3__Add_Platform_To_AuditRecords extends BaseJavaMigration { + + public final static String ALTER_AUDIT_RECORDS_TABLE_PLATFORM = "alter table audit_records add platform_name varchar(255)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ALTER_AUDIT_RECORDS_TABLE_PLATFORM))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V4__Add_Step_Name_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V4__Add_Step_Name_Indexes.java new file mode 100644 index 0000000000..86341c3c50 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V4__Add_Step_Name_Indexes.java @@ -0,0 +1,44 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds an index for STEP_NAME on BATCH_STEP_EXECUTION.
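+ * The index speeds up queries that filter BATCH_STEP_EXECUTION by step name.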
+ * + * @author Glenn Renfro + * + * @since 2.7 + */ +public class V4__Add_Step_Name_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_BATCH_STEP_EXECUTION = "create index STEP_NAME_IDX on BATCH_STEP_EXECUTION (STEP_NAME)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_BATCH_STEP_EXECUTION))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..4966bbd8c6 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds an index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..de3d5cd3f3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,206 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration; + +/** + * Adds the spring-cloud-task V3 + * and spring-batch V5 + * schemas to support Boot3 compatibility. + *
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V6__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_SEQ = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST LONGTEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " 
references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + "STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,\n" + + "VERSION BIGINT NOT NULL,\n" + + "STEP_NAME VARCHAR(100) NOT NULL,\n" + + "JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + "CREATE_TIME DATETIME(6) NOT NULL,\n" + + "START_TIME DATETIME(6) DEFAULT NULL ,\n" + + "END_TIME DATETIME(6) DEFAULT NULL ,\n" + + "STATUS VARCHAR(10) ,\n" + + "COMMIT_COUNT BIGINT ,\n" + + "READ_COUNT BIGINT ,\n" + + "FILTER_COUNT BIGINT ,\n" + + "WRITE_COUNT BIGINT ,\n" + + "READ_SKIP_COUNT BIGINT ,\n" + + "WRITE_SKIP_COUNT BIGINT ,\n" + + "PROCESS_SKIP_COUNT BIGINT ,\n" + + "ROLLBACK_COUNT BIGINT ,\n" + + "EXIT_CODE VARCHAR(2500) ,\n" + + "EXIT_MESSAGE VARCHAR(2500) ,\n" + + "LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + 
SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V7__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V7__Boot3_Boot_Version.java new file mode 100644 index 0000000000..f773634f32 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V7__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field indicates the version of + * Spring Boot used by the application and, by implication, the + * schema version of the task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public class V7__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java new file mode 100644 index 0000000000..503f1db50f --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.List; + +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration; +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * Since MariaDB operates in a case-sensitive mode for table and column names, the TASK_ tables we reference by prefix need to be uppercase. + * + * @author Corneil du Plessis + */ +public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration { + + + @Override + public List getCommands() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL) + ); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java new file mode 100644 index 0000000000..7152079689 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V9__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java index 444bfc5384..c72851deb4 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ * Baselining schema setup for {@code postgres}.
* * @author Janne Valkealahti - * + * @author Chris Bono */ public class MysqlBeforeBaseline extends AbstractBaselineCallback { @@ -46,9 +46,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_APP_REGISTRATION_DATA = "insert into\n" + - " app_registration_tmp (id, object_version, default_version, metadata_uri, name, type, uri, version) \n" + - " select id, object_Version, default_Version, metadata_Uri, name, type, uri, version\n" + - " from APP_REGISTRATION"; + " app_registration_tmp (id, object_version, default_version, metadata_uri, name, type, uri, version) \n" + + " select id, object_Version, default_Version, metadata_Uri, name, type, uri, version\n" + + " from APP_REGISTRATION"; public final static String DROP_APP_REGISTRATION_TABLE = "drop table APP_REGISTRATION"; @@ -61,9 +61,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_STREAM_DEFINITIONS_DATA = "insert into\n" + - " stream_definitions_tmp (definition_name, definition) \n" + - " select DEFINITION_NAME, DEFINITION\n" + - " from STREAM_DEFINITIONS"; + " stream_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from STREAM_DEFINITIONS"; public final static String DROP_STREAM_DEFINITIONS_TABLE = "drop table STREAM_DEFINITIONS"; @@ -76,9 +76,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_TASK_DEFINITIONS_DATA = "insert into\n" + - " task_definitions_tmp (definition_name, definition) \n" + - " select DEFINITION_NAME, DEFINITION\n" + - " from TASK_DEFINITIONS"; + " task_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from TASK_DEFINITIONS"; public final static String DROP_TASK_DEFINITIONS_TABLE = "drop table TASK_DEFINITIONS"; @@ -91,9 +91,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_AUDIT_RECORDS_DATA = "insert into\n" + - " audit_records_tmp (id, audit_action, audit_data, audit_operation, correlation_id, created_by, created_on)\n" + - " select id, audit_Action, audit_data, audit_Operation, correlation_id, created_by, created_On\n" + - " from AUDIT_RECORDS"; + " audit_records_tmp (id, audit_action, audit_data, audit_operation, correlation_id, created_by, created_on)\n" + + " select id, audit_Action, audit_data, audit_Operation, correlation_id, created_by, created_On\n" + + " from AUDIT_RECORDS"; public final static String DROP_AUDIT_RECORDS_TABLE = "drop table AUDIT_RECORDS"; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java index 8c542a0753..8aa810add8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,7 +27,7 @@ * {@code URI_REGISTRY} into {@code app_registration}. * * @author Janne Valkealahti - * + * @author Chris Bono */ public class MysqlMigrateUriRegistrySqlCommand extends AbstractMigrateUriRegistrySqlCommand { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V10__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V10__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..c745ac2b4c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V10__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. + * @author Corneil du Plessis + */ +public class V10__DropJobConfigurationLocation extends AbstractMigration { + public V10__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V11__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V11__CreateBatchIndexes.java new file mode 100644 index 0000000000..98924a10e1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V11__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V11__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V12__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V12__CreateTaskParentIndex.java new file mode 100644 index 0000000000..e4a651874d --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V12__CreateTaskParentIndex.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V12__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java index 323926bee3..e10ac0ad31 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,227 +25,227 @@ * Initial schema setup for {@code mysql}. 
* * @author Janne Valkealahti - * + * @author Chris Bono */ public class V1__Initial_Setup extends AbstractInitialSetupMigration { public final static String CREATE_HIBERNATE_SEQUENCE_TABLE = "create table if not exists hibernate_sequence (\n" + - " next_val bigint\n" + - ")"; + " next_val bigint\n" + + ")"; public final static String INSERT_HIBERNATE_SEQUENCE_TABLE = "insert into hibernate_sequence (next_val)\n" + - " select * from (select 1 as next_val) as temp\n" + - " where not exists(select * from hibernate_sequence)"; + " select * from (select 1 as next_val) as temp\n" + + " where not exists(select * from hibernate_sequence)"; public final static String CREATE_APP_REGISTRATION_TABLE = "create table app_registration (\n" + - " id bigint not null,\n" + - " object_version bigint,\n" + - " default_version bit,\n" + - " metadata_uri longtext,\n" + - " name varchar(255),\n" + - " type integer,\n" + - " uri longtext,\n" + - " version varchar(255),\n" + - " primary key (id)\n" + - ")"; + " id bigint not null,\n" + + " object_version bigint,\n" + + " default_version bit,\n" + + " metadata_uri longtext,\n" + + " name varchar(255),\n" + + " type integer,\n" + + " uri longtext,\n" + + " version varchar(255),\n" + + " primary key (id)\n" + + ")"; public final static String CREATE_AUDIT_RECORDS_TABLE = "create table audit_records (\n" + - " id bigint not null,\n" + - " audit_action bigint,\n" + - " audit_data longtext,\n" + - " audit_operation bigint,\n" + - " correlation_id varchar(255),\n" + - " created_by varchar(255),\n" + - " created_on datetime,\n" + - " primary key (id)\n" + - ")"; + " id bigint not null,\n" + + " audit_action bigint,\n" + + " audit_data longtext,\n" + + " audit_operation bigint,\n" + + " correlation_id varchar(255),\n" + + " created_by varchar(255),\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; public final static String CREATE_STREAM_DEFINITIONS_TABLE = "create table stream_definitions (\n" + - " definition_name varchar(255) not null,\n" + - " definition longtext,\n" + - " primary key (definition_name)\n" + - ")"; + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; public final static String CREATE_TASK_DEFINITIONS_TABLE = "create table task_definitions (\n" + - " definition_name varchar(255) not null,\n" + - " definition longtext,\n" + - " primary key (definition_name)\n" + - ")"; + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; public final static String CREATE_TASK_DEPLOYMENT_TABLE = "create table task_deployment (\n" + - " id bigint not null,\n" + - " object_version bigint,\n" + - " task_deployment_id varchar(255) not null,\n" + - " task_definition_name varchar(255) not null,\n" + - " platform_name varchar(255) not null,\n" + - " created_on datetime,\n" + - " primary key (id)\n" + - ")"; + " id bigint not null,\n" + + " object_version bigint,\n" + + " task_deployment_id varchar(255) not null,\n" + + " task_definition_name varchar(255) not null,\n" + + " platform_name varchar(255) not null,\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; public final static String CREATE_TASK_EXECUTION_TABLE = "CREATE TABLE TASK_EXECUTION (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME DATETIME DEFAULT NULL,\n" + - " END_TIME DATETIME DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE 
VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = "CREATE TABLE TASK_EXECUTION_PARAMS (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; public final static String CREATE_TASK_TASK_BATCH = "CREATE TABLE TASK_TASK_BATCH (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = "CREATE TABLE TASK_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; private final static String INSERT_TASK_SEQ_SEQUENCE = "INSERT INTO TASK_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE DATETIME(6) NOT NULL,\n" + - " constraint LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BATCH_JOB_INSTANCE (\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BATCH_JOB_EXECUTION (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME NOT NULL,\n" + - " START_TIME DATETIME DEFAULT NULL,\n" + - " END_TIME DATETIME DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE 
VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME,\n" + - " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + - " constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME NOT NULL,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = "CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " TYPE_CD VARCHAR(6) NOT NULL,\n" + - " KEY_NAME VARCHAR(100) NOT NULL,\n" + - " STRING_VAL VARCHAR(250),\n" + - " DATE_VAL DATETIME DEFAULT NULL,\n" + - " LONG_VAL BIGINT,\n" + - " DOUBLE_VAL DOUBLE PRECISION,\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " TYPE_CD VARCHAR(6) NOT NULL,\n" + + " KEY_NAME VARCHAR(100) NOT NULL,\n" + + " STRING_VAL VARCHAR(250),\n" + + " DATE_VAL DATETIME DEFAULT NULL,\n" + + " LONG_VAL BIGINT,\n" + + " DOUBLE_VAL DOUBLE PRECISION,\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = "CREATE TABLE BATCH_STEP_EXECUTION (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " START_TIME DATETIME NOT NULL,\n" + - " END_TIME DATETIME DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME,\n" + - " constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " START_TIME DATETIME NOT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE 
TABLE BATCH_STEP_EXECUTION_CONTEXT (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = "CREATE TABLE BATCH_STEP_EXECUTION_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; public final static String INSERT_BATCH_STEP_EXECUTION_SEQUENCE = "INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_STEP_EXECUTION_SEQ)"; public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = "CREATE TABLE BATCH_JOB_EXECUTION_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; public final static String INSERT_BATCH_JOB_EXECUTION_SEQUENCE = "INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_EXECUTION_SEQ)"; public final static String CREATE_BATCH_JOB_SEQUENCE = "CREATE TABLE BATCH_JOB_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; public final static String INSERT_BATCH_JOB_SEQUENCE = "INSERT INTO BATCH_JOB_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_SEQ)"; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java index ae6119c0a1..de92311001 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ * @author Daniel Serleg * @author Ilayaperumal Gopinathan * @author Michael Minella + * @author Chris Bono * * @since 2.3 */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java index cc76048696..0583e0fb18 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ * This migration class adds platformName column to audit_records. * * @author Daniel Serleg + * @author Chris Bono * * @since 2.4 */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java index cea57d5dae..e13053ced2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 the original author or authors. + * Copyright 2020-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ * This migration class adds index for STEP_NAME on BATCH_STEP_EXECUTION. * * @author Glenn Renfro + * @author Chris Bono * * @since 2.7 */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..70d9696aa6 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..7622e5aee4 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field will provide for indicating the version of + * Spring Boot used by the application and by implication the + * schema version of task and batch tables. 
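The V6 classes in this section only subclass AbstractBootVersionMigration, whose SQL lives outside this diff. As a hypothetical sketch of the pattern — the table and column below are assumptions for illustration, not the abstract base's actual DDL — a Flyway Java migration adding such a flag could look like:

import java.util.Collections;

import org.flywaydb.core.api.migration.BaseJavaMigration;
import org.flywaydb.core.api.migration.Context;

import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner;

public class V999__Example_Boot_Version extends BaseJavaMigration {

	private final SqlCommandsRunner runner = new SqlCommandsRunner();

	@Override
	public void migrate(Context context) throws Exception {
		// Assumed target table/column; the real DDL is encapsulated in
		// AbstractBootVersionMigration and may differ.
		runner.execute(context.getConnection(), Collections.singletonList(
				SqlCommand.from("alter table app_registration add boot_version varchar(16)")));
	}
}

Flyway discovers such classes by their V{version}__{description} names and applies them in version order, which is why each vendor package in this diff numbers its files so carefully.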
+ * @author Corneil du Plessis + * @since 2.11 + */ +public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..03a2af1b4e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,262 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration; + +/** + * Adds the spring-cloud-task V3 + * and spring-batch V5 + * schemas to support Boot3 compatibility. + * <p> + *
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + * @author Corneil du Plessis + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE TABLE BOOT3_TASK_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_TASK_SEQ = + "INSERT INTO BOOT3_TASK_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST TEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + private final static String INIT_TASK_EXECUTION_METADATA_SEQ = + "INSERT INTO BOOT3_TASK_EXECUTION_METADATA_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_TASK_EXECUTION_METADATA_SEQ)"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE\n" + + "(\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE 
= + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION\n" + + "(\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT\n" + + "(\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT\n" + + "(\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQ = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_STEP_EXECUTION_SEQ = + "INSERT INTO BOOT3_BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_STEP_EXECUTION_SEQ)"; + + private final static String 
CREATE_BATCH_JOB_EXECUTION_SEQ = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_JOB_EXECUTION_SEQ = + "INSERT INTO BOOT3_BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_JOB_EXECUTION_SEQ)"; + + private final static String CREATE_BATCH_JOB_SEQ = + "CREATE TABLE BOOT3_BATCH_JOB_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_JOB_SEQ = + "INSERT INTO BOOT3_BATCH_JOB_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_JOB_SEQ)"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(INIT_TASK_SEQ), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ_TABLE), + SqlCommand.from(INIT_TASK_EXECUTION_METADATA_SEQ)); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(INIT_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(INIT_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_SEQ), + SqlCommand.from(INIT_BATCH_JOB_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java new file mode 100644 index 0000000000..4d003291dd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration; +/** + * Since MySQL/MariaDB operates in a case-sensitive mode for table and column names we need TASK_ tables referenced with a prefix to be uppercase. + * @author Corneil du Plessis + */ +public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration { + public List getCommands() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL) + ); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java new file mode 100644 index 0000000000..c410878f66 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V9__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V10__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V10__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..6e85f9084d --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V10__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. 
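DropColumnSqlCommands, used here and again by the postgresql V11 migration later in this diff, takes a fully qualified "TABLE.COLUMN" spec. A minimal hypothetical reuse of the helper — the LEGACY_NOTES column is invented for illustration; only the JOB_CONFIGURATION_LOCATION drop below is real:

import java.util.Collections;

import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands;

public class V999__Drop_Legacy_Notes extends AbstractMigration {

	// Hypothetical column; mirrors the single-spec constructor usage in the
	// real V10/V11 drop migrations.
	public V999__Drop_Legacy_Notes() {
		super(Collections.singletonList(
				new DropColumnSqlCommands("BOOT3_TASK_EXECUTION.LEGACY_NOTES")));
	}
}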
+ * @author Corneil du Plessis + */ +public class V10__DropJobConfigurationLocation extends AbstractMigration { + public V10__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V11__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V11__CreateBatchIndexes.java new file mode 100644 index 0000000000..e615099a17 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V11__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V11__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V12__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V12__CreateTaskParentIndex.java new file mode 100644 index 0000000000..1c712a627e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V12__CreateTaskParentIndex.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V12__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java index 19eea4e91b..eab14faf8b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java @@ -111,7 +111,7 @@ public class V1__Initial_Setup extends AbstractInitialSetupMigration { ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE SEQUENCE TASK_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + "CREATE SEQUENCE TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + @@ -205,13 +205,13 @@ public 
class V1__Initial_Setup extends AbstractInitialSetupMigration { ")"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE"; + "CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCYCLE"; public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE"; + "CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCYCLE"; public final static String CREATE_BATCH_JOB_SEQUENCE = - "CREATE SEQUENCE BATCH_JOB_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE"; + "CREATE SEQUENCE BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCYCLE"; public V1__Initial_Setup() { super(null); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java index 931a429d18..b7580d3950 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java @@ -55,7 +55,7 @@ public class V2__Add_Descriptions_And_OriginalDefinition extends BaseJavaMigrati ")"; private final static String CREATE_TASK_METADATA_SEQUENCE = - "CREATE SEQUENCE task_execution_metadata_seq START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + "CREATE SEQUENCE task_execution_metadata_seq START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; private final SqlCommandsRunner runner = new SqlCommandsRunner(); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..f8aa5deadd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..c6f11a253c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field will provide for indicating the version of + * Spring Boot used by the application and by implication the + * schema version of task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..b148485574 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,203 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration; + +/** + * Adds the spring-cloud-task V3 + * and spring-batch V5 + * schemas to support Boot3 compatibility. + * <p> + *
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + "TASK_EXECUTION_ID NUMBER NOT NULL PRIMARY KEY ,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "TASK_NAME VARCHAR2(100),\n" + + "EXIT_CODE INTEGER,\n" + + "EXIT_MESSAGE VARCHAR2(2500),\n" + + "ERROR_MESSAGE VARCHAR2(2500),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + "EXTERNAL_EXECUTION_ID VARCHAR2(255),\n" + + "PARENT_EXECUTION_ID NUMBER\n" + + ")SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + "TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + "TASK_PARAM VARCHAR2(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + "TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + "JOB_EXECUTION_ID NUMBER NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + "LOCK_KEY VARCHAR2(36) NOT NULL,\n" + + "REGION VARCHAR2(100) NOT NULL,\n" + + "CLIENT_ID VARCHAR2(36),\n" + + "CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + + "constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")SEGMENT CREATION IMMEDIATE"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID NUMBER NOT NULL,\n" + + " TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST CLOB,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 ORDER NOCYCLE"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + "JOB_INSTANCE_ID NUMBER(19,0) NOT NULL PRIMARY KEY ,\n" + + "VERSION NUMBER(19,0) ,\n" + + "JOB_NAME VARCHAR2(100 char) NOT NULL,\n" + + "JOB_KEY VARCHAR2(32 char) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "VERSION NUMBER(19,0),\n" + + "JOB_INSTANCE_ID NUMBER(19,0) NOT NULL,\n" + + "CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "STATUS VARCHAR2(10 char),\n" + + "EXIT_CODE VARCHAR2(2500 char),\n" + + "EXIT_MESSAGE VARCHAR2(2500 char),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + "JOB_CONFIGURATION_LOCATION VARCHAR(2500 char) 
NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + "references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,\n" + + "TYPE_CD VARCHAR2(6 char) NOT NULL,\n" + + "KEY_NAME VARCHAR2(100 char) NOT NULL,\n" + + "STRING_VAL VARCHAR2(250 char),\n" + + "DATE_VAL TIMESTAMP(9) DEFAULT NULL,\n" + + "LONG_VAL NUMBER(19,0),\n" + + "DOUBLE_VAL NUMBER,\n" + + "IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + "STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "VERSION NUMBER(19,0) NOT NULL,\n" + + "STEP_NAME VARCHAR2(100 char) NOT NULL,\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,\n" + + "CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "STATUS VARCHAR2(10 char),\n" + + "COMMIT_COUNT NUMBER(19,0),\n" + + "READ_COUNT NUMBER(19,0),\n" + + "FILTER_COUNT NUMBER(19,0),\n" + + "WRITE_COUNT NUMBER(19,0),\n" + + "READ_SKIP_COUNT NUMBER(19,0),\n" + + "WRITE_SKIP_COUNT NUMBER(19,0),\n" + + "PROCESS_SKIP_COUNT NUMBER(19,0),\n" + + "ROLLBACK_COUNT NUMBER(19,0),\n" + + "EXIT_CODE VARCHAR2(2500 char),\n" + + "EXIT_MESSAGE VARCHAR2(2500 char),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + "STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL,\n" + + "SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL,\n" + + "SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + 
SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java new file mode 100644 index 0000000000..75f9af4815 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V9__Boot3_Batch5_Job_Execution_Params_Column_Fix.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V9__Boot3_Batch5_Job_Execution_Params_Column_Fix.java new file mode 100644 index 0000000000..441402c34b --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V9__Boot3_Batch5_Job_Execution_Params_Column_Fix.java @@ -0,0 +1,53 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * Fixes the names of the {@code BOOT3_BATCH_JOB_EXECUTION_PARAMS} parameter columns. 
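Worth noting: the fix below drops the Batch 4-style parameter columns and re-adds Batch 5-style ones rather than renaming them, so any existing parameter rows would be discarded — presumably acceptable only because the BOOT3_ tables introduced in V7 are still empty at this point. A hypothetical guard illustrating that assumption (not part of the actual migration):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public final class EmptyTableGuard {

	// Hypothetical pre-check: fail fast if destructive DDL would lose data.
	public static void assertEmpty(Connection connection, String table) throws SQLException {
		try (Statement statement = connection.createStatement();
				ResultSet rs = statement.executeQuery("select count(*) from " + table)) {
			rs.next();
			if (rs.getLong(1) > 0) {
				throw new IllegalStateException(table + " is not empty; refusing to drop columns");
			}
		}
	}
}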
+ * + * @author Chris Bono + */ +public class V9__Boot3_Batch5_Job_Execution_Params_Column_Fix extends BaseJavaMigration { + + public final static String DROP_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS DROP (TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, " + + "LONG_VAL, DOUBLE_VAL, IDENTIFYING)"; + + public final static String ADD_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS ADD (\n" + + " PARAMETER_NAME VARCHAR(100 char) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100 char) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500 char),\n" + + " IDENTIFYING CHAR(1) NOT NULL\n" + + ")"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(DROP_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(ADD_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V10__ChangeTextTypes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V10__ChangeTextTypes.java new file mode 100644 index 0000000000..7d4cba6cef --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V10__ChangeTextTypes.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.PostgreSQLTextToOID; + +public class V10__ChangeTextTypes extends AbstractMigration { + + public V10__ChangeTextTypes() { + super(null); + } + + + @Override + public void migrate(Context context) throws Exception { + PostgreSQLTextToOID.convertColumnFromOID("app_registration", "id", "uri", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("app_registration", "id", "metadata_uri", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("stream_definitions", "definition_name", "definition", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("stream_definitions", "definition_name", "original_definition", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("task_definitions", "definition_name", "definition", context.getConfiguration().getDataSource()); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V11__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V11__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..6f654936e4 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V11__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. 
+ * @author Corneil du Plessis + */ +public class V11__DropJobConfigurationLocation extends AbstractMigration { + public V11__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V12__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V12__CreateBatchIndexes.java new file mode 100644 index 0000000000..7f29bba3ff --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V12__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V12__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V13__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V13__CreateTaskParentIndex.java new file mode 100644 index 0000000000..e37e523924 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V13__CreateTaskParentIndex.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V13__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..4be04498bc --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
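V12 and V13 above delegate to shared abstract migrations whose DDL lives outside this diff. A hedged sketch of the shape such a shared migration can take, reusing the SqlCommand/AbstractMigration pattern visible elsewhere in this change set (the index and column names below are assumptions, not taken from the diff):

    import java.util.Collections;

    import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
    import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

    // Illustrative only: the real AbstractCreateBatchIndexesMigration defines its own DDL.
    public abstract class AbstractCreateBatchIndexesMigration extends AbstractMigration {
        private static final String CREATE_BATCH_STEP_EXECUTION_INDEX =
                "CREATE INDEX BATCH_STEP_EXEC_IDX ON BOOT3_BATCH_STEP_EXECUTION (JOB_EXECUTION_ID)";

        protected AbstractCreateBatchIndexesMigration() {
            super(Collections.singletonList(SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_INDEX)));
        }
    }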
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import java.util.Arrays;
+
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner;
+
+/**
+ * This migration adds an index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS.
+ *
+ * @author Claudio Tasso
+ *
+ * @since 2.10
+ */
+public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration {
+
+    public final static String ADD_INDEX_TO_TASK_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)";
+
+    private final SqlCommandsRunner runner = new SqlCommandsRunner();
+
+    @Override
+    public void migrate(Context context) throws Exception {
+        runner.execute(context.getConnection(), Arrays.asList(
+                SqlCommand.from(ADD_INDEX_TO_TASK_EXECUTION_PARAMS)));
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V6__Boot3_Boot_Version.java
new file mode 100644
index 0000000000..fdaee27127
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V6__Boot3_Boot_Version.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration;
+
+/**
+ * The boot_version field indicates the version of
+ * Spring Boot used by the application and, by implication, the
+ * schema version of the task and batch tables.
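As with the other Java migrations in this change set, Flyway derives the version and description from the class name, so classpath placement is the only registration needed. A small fragment for orientation (it assumes the usual no-arg constructor chain and an import of org.flywaydb.core.api.migration.JavaMigration):

    // V6__Boot3_Boot_Version -> version "6", description "Boot3 Boot Version"
    JavaMigration migration = new V6__Boot3_Boot_Version();
    System.out.println(migration.getVersion());     // prints: 6
    System.out.println(migration.getDescription()); // prints: Boot3 Boot Version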
+ * @author Corneil du Plessis
+ * @since 2.11
+ */
+public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration {
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java
new file mode 100644
index 0000000000..1898bab5d3
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration;
+
+/**
+ * Adds the spring-cloud-task V3
+ * and spring-batch V5
+ * schemas to support Boot3 compatibility.
+ *

    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE TIMESTAMP NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST TEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ");"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP NOT NULL,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID 
BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP NOT NULL,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + 
SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java new file mode 100644 index 0000000000..f85e45b606 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java @@ -0,0 +1,22 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java new file mode 100644 index 0000000000..68c6e22d09 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java @@ -0,0 +1,34 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
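V8 above pulls in AbstractAggregateViewMigration, whose DDL is defined outside this diff. Given the aggregate DAOs introduced later in this change set, the views plausibly union the Boot 2 tables with their BOOT3_-prefixed counterparts under a schema-target discriminator. A hedged sketch in the SQL-string-constant style used throughout these migrations (view, column, and target names are assumptions):

    // Illustrative shape only; the real view definition lives in AbstractAggregateViewMigration.
    public static final String CREATE_AGGREGATE_TASK_EXECUTION_VIEW =
            "CREATE VIEW AGGREGATE_TASK_EXECUTION AS\n" +
            "SELECT TASK_EXECUTION_ID, TASK_NAME, EXIT_CODE, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION\n" +
            "UNION ALL\n" +
            "SELECT TASK_EXECUTION_ID, TASK_NAME, EXIT_CODE, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION";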
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import org.flywaydb.core.api.migration.Context;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+
+public class V9__ChangeTextTypes extends AbstractMigration {
+
+    public V9__ChangeTextTypes() {
+        super(null);
+    }
+
+    @Override
+    public void migrate(Context context) throws Exception {
+        // perform no conversions
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java
index 69625e1561..490808d77d 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java
@@ -74,7 +74,7 @@ public void migrate(Context context) throws Exception {
 			logger.info("Looks like we have hibernate_sequence table, initiate fix");
 		}
 		catch (Exception e) {
-			logger.debug("Unable to query hibernate_sequence table, looks like we have a proper sequence", e);
+			logger.debug("Unable to query hibernate_sequence table, looks like we have a proper sequence: " + e);
 		}
 		// will result call to get commands from this class and then we choose which ones to run
 		super.migrate(context);
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V10__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V10__CreateBatchIndexes.java
new file mode 100644
index 0000000000..2838935906
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V10__CreateBatchIndexes.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration;
+
+public class V10__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V11__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V11__CreateTaskParentIndex.java
new file mode 100644
index 0000000000..a6000b3372
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V11__CreateTaskParentIndex.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration;
+
+public class V11__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java
index a767ed2d51..21da07bc1c 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java
@@ -69,7 +69,7 @@ public void migrate(Context context) throws Exception {
 			logger.info("Looks like we have TASK_SEQ table, initiate fix");
 		}
 		catch (Exception e) {
-			logger.debug("Unable to query TASK_SEQ table, a TASK_SEQ sequence may already exist", e);
+			logger.debug("Unable to query TASK_SEQ table, a TASK_SEQ sequence may already exist: " + e);
 		}
 		// will result call to get commands from this class and then we choose which ones to run
 		super.migrate(context);
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V5__Add_Task_Execution_Params_Indexes.java
new file mode 100644
index 0000000000..81b99b0153
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V5__Add_Task_Execution_Params_Indexes.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import java.util.Arrays;
+
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner;
+
+/**
+ * This migration adds an index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS.
+ *
+ * @author Claudio Tasso
+ *
+ * @since 2.10
+ */
+public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration {
+
+    public final static String ADD_INDEX_TO_TASK_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)";
+
+    private final SqlCommandsRunner runner = new SqlCommandsRunner();
+
+    @Override
+    public void migrate(Context context) throws Exception {
+        runner.execute(context.getConnection(), Arrays.asList(
+                SqlCommand.from(ADD_INDEX_TO_TASK_EXECUTION_PARAMS)));
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V6__Boot3_Boot_Version.java
new file mode 100644
index 0000000000..5c8a16b492
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V6__Boot3_Boot_Version.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration;
+
+/**
+ * The boot_version field indicates the version of
+ * Spring Boot used by the application and, by implication, the
+ * schema version of the task and batch tables.
+ * @author Corneil du Plessis
+ * @since 2.11
+ */
+public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration {
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java
new file mode 100644
index 0000000000..3c1f1437c3
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration;
+
+/**
+ * Adds the spring-cloud-task V3
+ * and spring-batch V5
+ * schemas to support Boot3 compatibility.
+ *

    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME2 DEFAULT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME2 NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST VARCHAR(MAX) NULL,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME2 NOT NULL,\n" + + " START_TIME DATETIME2 DEFAULT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + 
"CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME2 NOT NULL,\n" + + " START_TIME DATETIME2 NOT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + 
SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE),
+                SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE),
+                SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE),
+                SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE));
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java
new file mode 100644
index 0000000000..d5f32a2f27
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration;
+
+public class V8__AddAggregateViews extends AbstractAggregateViewMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V9__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V9__DropJobConfigurationLocation.java
new file mode 100644
index 0000000000..871be2c6d6
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V9__DropJobConfigurationLocation.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import java.util.Collections;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands;
+
+/**
+ * Removes the extra JOB_CONFIGURATION_LOCATION column.
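Both drop-column migrations hand a TABLE.COLUMN spec to DropColumnSqlCommands, a class that sits outside this diff. Conceptually, the spec expands to a plain ALTER TABLE, along these lines (hedged sketch; the real class likely also tolerates a column that is already gone):

    // Hypothetical expansion of new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"):
    String spec = "BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION";
    int dot = spec.indexOf('.');
    String ddl = "ALTER TABLE " + spec.substring(0, dot) + " DROP COLUMN " + spec.substring(dot + 1);
    // ddl == "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION DROP COLUMN JOB_CONFIGURATION_LOCATION"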
+ * @author Corneil du Plessis
+ */
+public class V9__DropJobConfigurationLocation extends AbstractMigration {
+    public V9__DropJobConfigurationLocation() {
+        super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION")));
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java
index f4f0af2734..b282141601 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java
@@ -19,9 +19,9 @@
 import javax.sql.DataSource;
 
 import org.springframework.beans.factory.FactoryBean;
+import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean;
 import org.springframework.cloud.task.repository.TaskExplorer;
 import org.springframework.cloud.task.repository.support.SimpleTaskExplorer;
-import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
 import org.springframework.util.Assert;
 
 /**
@@ -31,19 +31,19 @@
  */
 public class TaskExplorerFactoryBean implements FactoryBean<TaskExplorer> {
 
-    private DataSource dataSource;
-
+    private final DataSource dataSource;
     private TaskExplorer taskExplorer;
-
-    public TaskExplorerFactoryBean(DataSource dataSource) {
+    private final String tablePrefix;
+    public TaskExplorerFactoryBean(DataSource dataSource, String tablePrefix) {
         Assert.notNull(dataSource, "dataSource must not be null");
         this.dataSource = dataSource;
+        this.tablePrefix = tablePrefix;
     }
 
     @Override
     public TaskExplorer getObject() throws Exception {
         if (taskExplorer == null) {
-            taskExplorer = new SimpleTaskExplorer(new TaskExecutionDaoFactoryBean(dataSource));
+            taskExplorer = new SimpleTaskExplorer(new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, tablePrefix));
         }
         return taskExplorer;
     }
@@ -53,9 +53,4 @@ public Class<?> getObjectType() {
         return TaskExplorer.class;
     }
 
-    @Override
-    public boolean isSingleton() {
-        return true;
-    }
-
 }
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java
index 889ca09c2c..7939e2679b 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java
@@ -18,12 +18,19 @@
 import org.springframework.batch.core.Step;
 import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.launch.NoSuchJobExecutionException;
 import org.springframework.batch.core.step.tasklet.TaskletStep;
 import org.springframework.cloud.dataflow.rest.job.support.StepType;
 import org.springframework.cloud.dataflow.rest.job.support.TaskletType;
 import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource;
+import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException;
+import org.springframework.cloud.dataflow.server.controller.JobStepExecutionController;
+import
org.springframework.cloud.dataflow.server.controller.JobStepExecutionProgressController; import org.springframework.util.Assert; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Knows how to build a StepExecutionResource out of our domain model * {@link StepExecution}. @@ -33,8 +40,25 @@ */ public class StepExecutionResourceBuilder { - static public StepExecutionResource toModel(StepExecution entity) { - return new StepExecutionResource(entity.getJobExecution().getId(), entity, generateStepType(entity)); + static public StepExecutionResource toModel(StepExecution entity, String schemaTarget) { + StepExecutionResource resource = new StepExecutionResource(entity.getJobExecution().getId(), entity, generateStepType(entity), schemaTarget); + try { + resource.add( + linkTo( + methodOn(JobStepExecutionController.class) + .getStepExecution(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId(), schemaTarget) + ).withSelfRel() + ); + resource.add( + linkTo( + methodOn(JobStepExecutionProgressController.class) + .progress(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId(), schemaTarget) + ).withRel("progress") + ); + } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { + throw new RuntimeException(e); + } + return resource; } private static String generateStepType(StepExecution stepExecution) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java new file mode 100644 index 0000000000..3d77fc8d30 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java @@ -0,0 +1,66 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.repository; + + +import java.util.Collection; +import java.util.Date; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import org.springframework.batch.core.launch.NoSuchJobInstanceException; +import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +/** + * Provides for reading job execution data for Batch 4 and 5 schema versions. 
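The reworked StepExecutionResourceBuilder above assembles HATEOAS links with the standard linkTo(methodOn(..)) idiom. For orientation, a small usage fragment with made-up ids (the controllers' actual request mappings are not shown in this diff; Link is org.springframework.hateoas.Link, and the checked exceptions are handled the same way the builder does):

    // Build a self rel for job execution 42, step execution 7, schema target "boot3" (values illustrative).
    Link self;
    try {
        self = linkTo(methodOn(JobStepExecutionController.class)
                .getStepExecution(42L, 7L, "boot3")).withSelfRel();
    }
    catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) {
        throw new RuntimeException(e);
    }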
+ *
+ * @author Corneil du Plessis
+ * @since 2.11.0
+ */
+public interface AggregateJobQueryDao {
+    Page<JobInstanceExecutions> listJobInstances(String jobName, Pageable pageable) throws NoSuchJobException;
+
+    Page<TaskJobExecution> listJobExecutions(String jobName, BatchStatus status, Pageable pageable) throws NoSuchJobExecutionException;
+
+    Page<TaskJobExecution> listJobExecutionsBetween(Date fromDate, Date toDate, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsWithSteps(Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsWithStepCount(Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(int jobInstanceId, String schemaTarget, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(int taskExecutionId, String schemaTarget, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsForJobWithStepCount(String jobName, Pageable pageable) throws NoSuchJobException;
+
+    TaskJobExecution getJobExecution(long id, String schemaTarget) throws NoSuchJobExecutionException;
+
+    JobInstanceExecutions getJobInstanceExecution(String jobName, long instanceId);
+
+    JobInstanceExecutions getJobInstanceExecutions(long id, String schemaTarget);
+
+    JobInstance getJobInstance(long id, String schemaTarget) throws NoSuchJobInstanceException;
+
+    void populateCtrStatus(Collection<AggregateTaskExecution> aggregateTaskExecutions);
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java
new file mode 100644
index 0000000000..17bd4a9aac
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
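A brief usage sketch of the aggregate DAO above, using the standard Spring Data paging types already imported by the interface (the aggregateJobQueryDao reference is an assumed injected bean, and getTaskId() is assumed from the TaskJobExecution domain class):

    try {
        // First 20 executions of "myJob", with step counts, across both schema targets.
        Page<TaskJobExecution> executions =
                aggregateJobQueryDao.listJobExecutionsForJobWithStepCount("myJob", PageRequest.of(0, 20));
        executions.forEach(execution -> System.out.println(execution.getTaskId()));
    }
    catch (NoSuchJobException e) {
        // no job with that name has run yet
    }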
+ */
+
+package org.springframework.cloud.dataflow.server.repository;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+public class DataflowJobExecutionDaoContainer {
+    private final Map<String, DataflowJobExecutionDao> jobExecutionDaos = new HashMap<>();
+
+    public DataflowJobExecutionDaoContainer() {
+    }
+
+    public void add(String name, DataflowJobExecutionDao jobExecutionDao) {
+        jobExecutionDaos.put(name, jobExecutionDao);
+    }
+
+    public DataflowJobExecutionDao get(String schemaTarget) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        DataflowJobExecutionDao result = jobExecutionDaos.get(schemaTarget);
+        Assert.notNull(result, "Expected to find jobExecutionDao for " + schemaTarget);
+        return result;
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java
index cdf2eb6417..b0e5f8bdd0 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java
@@ -74,6 +74,8 @@ public interface DataflowTaskExecutionDao {
 	 * Returns the size of all the task executions with the option to include only the completed executions.
 	 * @param onlyCompleted filter by completed task executions
 	 * @param taskName the task name, if null then retrieve all the tasks
+	 *
+	 * @return The count of task executions matching inputs.
 	 */
 	Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName);
 
@@ -81,6 +83,8 @@ public interface DataflowTaskExecutionDao {
 	 * Returns all the task execution IDs of the completed task executions.
 	 * @param onlyCompleted filter by completed task executions
 	 * @param taskName the task name, if null then retrieve all the tasks
+	 *
+	 * @return The set of all execution ids matching inputs.
 	 */
 	Set<Long> getAllTaskExecutionIds(boolean onlyCompleted, String taskName);
 
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java
new file mode 100644
index 0000000000..7badea92bf
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
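The container above resolves one DAO per schema target and falls back to the default target when none is given; the same pattern repeats for the task-execution and metadata DAO containers that follow. A short usage sketch (the DAO instances and the 'boot2'/'boot3' target names are assumptions):

    DataflowJobExecutionDaoContainer container = new DataflowJobExecutionDaoContainer();
    container.add("boot2", boot2JobExecutionDao); // one DAO per schema target
    container.add("boot3", boot3JobExecutionDao);

    DataflowJobExecutionDao dao = container.get(null); // blank target -> SchemaVersionTarget.defaultTarget()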
+ */
+
+package org.springframework.cloud.dataflow.server.repository;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+public class DataflowTaskExecutionDaoContainer {
+    private final Map<String, DataflowTaskExecutionDao> taskExecutionContainer = new HashMap<>();
+
+    public DataflowTaskExecutionDaoContainer() {
+    }
+
+    public void add(String schemaTarget, DataflowTaskExecutionDao dataflowTaskExecutionDao) {
+        taskExecutionContainer.put(schemaTarget, dataflowTaskExecutionDao);
+    }
+
+    public DataflowTaskExecutionDao get(String schemaTarget) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        DataflowTaskExecutionDao result = taskExecutionContainer.get(schemaTarget);
+        Assert.notNull(result, "Expected DataflowTaskExecutionDao for " + schemaTarget);
+        return result;
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java
index c94bcb79e1..23af2b762f 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java
@@ -15,6 +15,7 @@
  */
 package org.springframework.cloud.dataflow.server.repository;
 
+import java.util.Map;
 import java.util.Set;
 
 import org.springframework.cloud.dataflow.core.TaskManifest;
@@ -24,10 +25,12 @@
  * Data access object used for manipulating task manifests
  *
  * @author Michael Minella
+ * @author Corneil du Plessis
  * @since 2.3
  */
 public interface DataflowTaskExecutionMetadataDao {
 
+
 	/**
 	 * Saves a {@code TaskManifest} related to the supplied {@code TaskExecution}
 	 *
@@ -51,6 +54,13 @@
 	 */
 	TaskManifest findManifestById(Long id);
 
+	/**
+	 * Returns the manifests for the supplied ids, keyed by task execution id.
+	 * @param ids the task execution ids.
+	 * @return map of manifests with id as key.
+	 */
+	Map<Long, TaskManifest> findManifestByIds(Set<Long> ids);
+
 	/**
 	 * Deletes the task manifest records associated with the collection of task execution ids provided.
 	 *
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java
new file mode 100644
index 0000000000..194a75663d
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
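The new findManifestByIds(..) replaces N findManifestById(..) round trips with one batched lookup. A usage sketch (the ids are illustrative, dao is an assumed DataflowTaskExecutionMetadataDao instance, and the collection types come from java.util):

    Set<Long> ids = new HashSet<>(Arrays.asList(101L, 102L, 103L));
    Map<Long, TaskManifest> manifests = dao.findManifestByIds(ids);
    TaskManifest manifest = manifests.get(101L); // null if no manifest was recorded for that execution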
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.repository;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Provides a container of DataflowTaskExecutionMetadataDao instances, one per schema target.
+ * @author Corneil du Plessis
+ */
+public class DataflowTaskExecutionMetadataDaoContainer {
+    private final Map<String, DataflowTaskExecutionMetadataDao> dataflowTaskExecutionMetadataDaos = new HashMap<>();
+
+    public DataflowTaskExecutionMetadataDaoContainer() {
+    }
+
+    public void add(String schemaTarget, DataflowTaskExecutionMetadataDao dao) {
+        dataflowTaskExecutionMetadataDaos.put(schemaTarget, dao);
+    }
+
+    public DataflowTaskExecutionMetadataDao get(String schemaTarget) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        DataflowTaskExecutionMetadataDao result = dataflowTaskExecutionMetadataDaos.get(schemaTarget);
+        Assert.notNull(result, "Expected DataflowTaskExecutionMetadataDao for " + schemaTarget);
+        return result;
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java
new file mode 100644
index 0000000000..bbe6ebfedc
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.server.repository; + +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.core.TaskDefinition; + +/** + * Provide a simple interface for reading Task Definitions when required by Aggregate Task Explorer + * @author Corneil du Plessis + */ +public class DefaultTaskDefinitionReader implements TaskDefinitionReader { + private final TaskDefinitionRepository taskDefinitionRepository; + + public DefaultTaskDefinitionReader(TaskDefinitionRepository taskDefinitionRepository) { + this.taskDefinitionRepository = taskDefinitionRepository; + } + + @Override + public TaskDefinition findTaskDefinition(String taskName) { + return taskDefinitionRepository.findByTaskName(taskName); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java new file mode 100644 index 0000000000..0806660aeb --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.repository; + +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.core.TaskDeployment; + +/** + * Provide a simple interface for reading Task deployments when required by Aggregate Task Explorer + * @author Corneil du Plessis + */ +public class DefaultTaskDeploymentReader implements TaskDeploymentReader { + private final TaskDeploymentRepository taskDeploymentRepository; + + public DefaultTaskDeploymentReader(TaskDeploymentRepository taskDeploymentRepository) { + this.taskDeploymentRepository = taskDeploymentRepository; + } + + @Override + public TaskDeployment getDeployment(String externalTaskId) { + return taskDeploymentRepository.findByTaskDeploymentId(externalTaskId); + } + + @Override + public TaskDeployment getDeployment(String externalTaskId, String platform) { + return taskDeploymentRepository.findByTaskDeploymentIdAndPlatformName(externalTaskId, platform); + } + + @Override + public TaskDeployment findByDefinitionName(String definitionName) { + return taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(definitionName); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java new file mode 100644 index 0000000000..0d3328a67d --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java @@ -0,0 +1,1081 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.repository; + +import java.lang.reflect.Field; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.Collectors; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameter; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import org.springframework.batch.core.launch.NoSuchJobInstanceException; +import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; +import org.springframework.batch.item.database.Order; +import org.springframework.batch.item.database.PagingQueryProvider; +import org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.batch.item.database.support.Db2PagingQueryProvider; +import org.springframework.batch.item.database.support.OraclePagingQueryProvider; +import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.batch.item.database.support.SqlPagingQueryUtils; +import org.springframework.batch.item.database.support.SqlServerPagingQueryProvider; +import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; +import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.batch.JobService; +import org.springframework.cloud.dataflow.server.converter.DateToStringConverter; +import org.springframework.cloud.dataflow.server.converter.StringToDateConverter; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.cloud.dataflow.server.service.impl.OffsetOutOfBoundsException; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.core.env.Environment; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.convert.Jsr310Converters; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.ResultSetExtractor; +import 
org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.ReflectionUtils; +import org.springframework.util.StringUtils; + +/** + * Stores job execution information to a JDBC DataSource. Mirrors the {@link JdbcJobExecutionDao} + * but contains Spring Cloud Data Flow specific operations. This functionality might + * be migrated to Spring Batch itself eventually. + * + * @author Corneil du Plessis + * @since 2.11.0 + */ +public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao { + + private final static Logger LOG = LoggerFactory.getLogger(JdbcAggregateJobQueryDao.class); + + private static final String GET_COUNT = "SELECT COUNT(1) from AGGREGATE_JOB_EXECUTION"; + + private static final String GET_COUNT_BY_DATE = "SELECT COUNT(1) from AGGREGATE_JOB_EXECUTION WHERE START_TIME BETWEEN ? AND ?"; + + private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(E.JOB_EXECUTION_ID) from AGGREGATE_JOB_INSTANCE I" + + " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" + + " WHERE I.JOB_NAME LIKE ?"; + + private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(E.JOB_EXECUTION_ID) from AGGREGATE_JOB_INSTANCE I" + + " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" + + " WHERE E.STATUS = ?"; + + private static final String GET_COUNT_BY_JOB_INSTANCE_ID = "SELECT COUNT(E.JOB_EXECUTION_ID) from AGGREGATE_JOB_INSTANCE I" + + " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" + + " WHERE I.JOB_INSTANCE_ID = ? AND I.SCHEMA_TARGET = ?"; + + private static final String GET_COUNT_BY_TASK_EXECUTION_ID = "SELECT COUNT(T.TASK_EXECUTION_ID) FROM AGGREGATE_JOB_EXECUTION E" + + " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" + + " WHERE T.TASK_EXECUTION_ID = ? AND T.SCHEMA_TARGET = ?"; + + private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(E.JOB_EXECUTION_ID) FROM AGGREGATE_JOB_INSTANCE I" + + " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID = E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET = E.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" + + " WHERE I.JOB_NAME LIKE ? 
AND E.STATUS = ?"; + + private static final String FIELDS = "E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME," + + " E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE," + + " E.CREATE_TIME as CREATE_TIME, E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION," + + " I.JOB_INSTANCE_ID as JOB_INSTANCE_ID, I.JOB_NAME as JOB_NAME, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID," + + " E.SCHEMA_TARGET as SCHEMA_TARGET"; + + private static final String FIELDS_WITH_STEP_COUNT = FIELDS + + ", (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT"; + + private static final String GET_JOB_INSTANCE_BY_ID = "SELECT I.JOB_INSTANCE_ID, I.VERSION, I.JOB_NAME, I.JOB_KEY" + + " FROM AGGREGATE_JOB_INSTANCE I" + + " WHERE I.JOB_INSTANCE_ID = ? AND I.SCHEMA_TARGET = ?"; + + private static final String NAME_FILTER = "I.JOB_NAME LIKE ?"; + + private static final String DATE_RANGE_FILTER = "E.START_TIME BETWEEN ? AND ?"; + + private static final String JOB_INSTANCE_ID_FILTER = "I.JOB_INSTANCE_ID = ? AND I.SCHEMA_TARGET = ?"; + + private static final String STATUS_FILTER = "E.STATUS = ?"; + + private static final String NAME_AND_STATUS_FILTER = "I.JOB_NAME LIKE ? AND E.STATUS = ?"; + + private static final String TASK_EXECUTION_ID_FILTER = + "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.SCHEMA_TARGET = E.SCHEMA_TARGET AND B.TASK_EXECUTION_ID = ? AND E.SCHEMA_TARGET = ?"; + + private static final String FROM_CLAUSE_TASK_EXEC_BATCH = "JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET"; + + private static final String FIND_PARAMS_FROM_ID2 = "SELECT JOB_EXECUTION_ID, KEY_NAME, TYPE_CD, " + + "STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING, 'boot2' as SCHEMA_TARGET from %PREFIX%JOB_EXECUTION_PARAMS where JOB_EXECUTION_ID = ?"; + + private static final String FIND_PARAMS_FROM_ID3 = "SELECT JOB_EXECUTION_ID, PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING, 'boot3' as SCHEMA_TARGET" + + " from %PREFIX%JOB_EXECUTION_PARAMS where JOB_EXECUTION_ID = ?"; + + private static final String FIND_JOB_BY = "SELECT I.JOB_INSTANCE_ID as JOB_INSTANCE_ID, I.JOB_NAME as JOB_NAME, I.SCHEMA_TARGET as SCHEMA_TARGET," + + " E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME, E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE, E.CREATE_TIME as CREATE_TIME," + + " E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID," + + " (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT" + + " from AGGREGATE_JOB_INSTANCE I" + + " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID = E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET = E.SCHEMA_TARGET" + + " LEFT OUTER JOIN AGGREGATE_TASK_BATCH TT ON E.JOB_EXECUTION_ID = TT.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = TT.SCHEMA_TARGET" + + " LEFT OUTER JOIN AGGREGATE_TASK_EXECUTION T ON TT.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND TT.SCHEMA_TARGET = T.SCHEMA_TARGET"; + + private static final String FIND_CTR_STATUS = "SELECT T.TASK_EXECUTION_ID as TASK_EXECUTION_ID, J.EXIT_CODE as CTR_STATUS" + + " from AGGREGATE_TASK_EXECUTION T" + + " JOIN AGGREGATE_TASK_BATCH TB ON 
TB.TASK_EXECUTION_ID=T.TASK_EXECUTION_ID AND TB.SCHEMA_TARGET=T.SCHEMA_TARGET" + + " JOIN AGGREGATE_JOB_EXECUTION J ON J.JOB_EXECUTION_ID=TB.JOB_EXECUTION_ID AND J.SCHEMA_TARGET=TB.SCHEMA_TARGET" + + " WHERE T.TASK_EXECUTION_ID in (:taskExecutionIds) " + + " AND T.SCHEMA_TARGET = ':schemaTarget'" + + " AND (select count(*) from AGGREGATE_TASK_EXECUTION CT" + + " where (select count(*) from AGGREGATE_TASK_EXECUTION_PARAMS where" + + " CT.TASK_EXECUTION_ID = TASK_EXECUTION_ID and" + + " CT.SCHEMA_TARGET = SCHEMA_TARGET and" + + " TASK_PARAM = '--spring.cloud.task.parent-schema-target=:schemaTarget') > 0" + + " AND CT.PARENT_EXECUTION_ID = T.TASK_EXECUTION_ID) > 0"; + + private static final String FIND_JOB_BY_NAME_INSTANCE_ID = FIND_JOB_BY + + " where I.JOB_NAME = ? AND I.JOB_INSTANCE_ID = ?"; + + private static final String FIND_JOB_BY_INSTANCE_ID_SCHEMA = FIND_JOB_BY + + " where I.JOB_INSTANCE_ID = ? AND I.SCHEMA_TARGET = ?"; + + private static final String FIND_JOBS_FIELDS = "I.JOB_INSTANCE_ID as JOB_INSTANCE_ID, I.JOB_NAME as JOB_NAME, I.SCHEMA_TARGET as SCHEMA_TARGET," + + " E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME, E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE, E.CREATE_TIME as CREATE_TIME," + + " E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID"; + + private static final String FIND_JOBS_FROM = "LEFT OUTER JOIN AGGREGATE_TASK_BATCH TT ON E.JOB_EXECUTION_ID = TT.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = TT.SCHEMA_TARGET" + + " LEFT OUTER JOIN AGGREGATE_TASK_EXECUTION T ON TT.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND TT.SCHEMA_TARGET = T.SCHEMA_TARGET"; + + private static final String FIND_JOBS_WHERE = "I.JOB_NAME LIKE ?"; + + private static final String FIND_BY_ID_SCHEMA = "E.JOB_EXECUTION_ID = ? 
AND E.SCHEMA_TARGET = ?"; + + private static final String ROW_NUMBER_OPTIMIZATION_ENABLED_PROPERTY = DataFlowPropertyKeys.PREFIX + "task.jdbc.row-number-optimization.enabled"; + + private final PagingQueryProvider allExecutionsPagingQueryProvider; + + private final PagingQueryProvider byJobNameAndStatusPagingQueryProvider; + + private final PagingQueryProvider byStatusPagingQueryProvider; + + private final PagingQueryProvider byJobNameWithStepCountPagingQueryProvider; + + private final PagingQueryProvider executionsByDateRangeWithStepCountPagingQueryProvider; + + private final PagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider; + + private final PagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider; + + private final PagingQueryProvider jobExecutionsPagingQueryProviderByName; + + private final PagingQueryProvider allExecutionsPagingQueryProviderNoStepCount; + + private final PagingQueryProvider byJobNamePagingQueryProvider; + + private final PagingQueryProvider byJobExecutionIdAndSchemaPagingQueryProvider; + + private final DataSource dataSource; + + private final JdbcTemplate jdbcTemplate; + + private final SchemaService schemaService; + + private final JobServiceContainer jobServiceContainer; + + private final ConfigurableConversionService conversionService = new DefaultConversionService(); + + private final boolean useRowNumberOptimization; + + public JdbcAggregateJobQueryDao( + DataSource dataSource, + SchemaService schemaService, + JobServiceContainer jobServiceContainer, + Environment environment) throws Exception { + this.dataSource = dataSource; + this.jdbcTemplate = new JdbcTemplate(dataSource); + this.schemaService = schemaService; + this.jobServiceContainer = jobServiceContainer; + this.useRowNumberOptimization = determineUseRowNumberOptimization(environment); + + conversionService.addConverter(new DateToStringConverter()); + conversionService.addConverter(new StringToDateConverter()); + Jsr310Converters.getConvertersToRegister().forEach(conversionService::addConverter); + + allExecutionsPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, null); + executionsByDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, DATE_RANGE_FILTER); + allExecutionsPagingQueryProviderNoStepCount = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, null); + byStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, STATUS_FILTER); + byJobNameAndStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_AND_STATUS_FILTER); + byJobNamePagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER); + byJobNameWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER); + byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, JOB_INSTANCE_ID_FILTER); + byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, TASK_EXECUTION_ID_FILTER); + jobExecutionsPagingQueryProviderByName = getPagingQueryProvider(FIND_JOBS_FIELDS, FIND_JOBS_FROM, FIND_JOBS_WHERE, Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING)); + byJobExecutionIdAndSchemaPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, FIND_BY_ID_SCHEMA); + } + + private boolean 
determineUseRowNumberOptimization(Environment environment) {
+        boolean supportsRowNumberFunction = determineSupportsRowNumberFunction(this.dataSource);
+        boolean rowNumberOptimizationEnabled = environment.getProperty(ROW_NUMBER_OPTIMIZATION_ENABLED_PROPERTY, Boolean.class, Boolean.TRUE);
+        return supportsRowNumberFunction && rowNumberOptimizationEnabled;
+    }
+
+    @Override
+    public Page<JobInstanceExecutions> listJobInstances(String jobName, Pageable pageable) throws NoSuchJobException {
+        int total = countJobExecutions(jobName);
+        if (total == 0) {
+            throw new NoSuchJobException("No Job with that name either current or historic: [" + jobName + "]");
+        }
+        List<JobInstanceExecutions> taskJobInstancesForJobName = getTaskJobInstancesForJobName(jobName, pageable);
+        return new PageImpl<>(taskJobInstancesForJobName, pageable, total);
+
+    }
+
+    @Override
+    public void populateCtrStatus(Collection<AggregateTaskExecution> aggregateTaskExecutions) {
+        Map<String, List<AggregateTaskExecution>> targets = aggregateTaskExecutions.stream().collect(Collectors.groupingBy(aggregateTaskExecution -> aggregateTaskExecution.getSchemaTarget()));
+        final AtomicInteger updated = new AtomicInteger(0);
+        for (Map.Entry<String, List<AggregateTaskExecution>> entry : targets.entrySet()) {
+            String target = entry.getKey();
+            Map<Long, AggregateTaskExecution> aggregateTaskExecutionMap = entry.getValue().stream()
+                    .collect(Collectors.toMap(AggregateTaskExecution::getExecutionId, Function.identity()));
+            String ids = aggregateTaskExecutionMap.keySet()
+                    .stream()
+                    .map(Object::toString)
+                    .collect(Collectors.joining(","));
+            String sql = FIND_CTR_STATUS.replace(":taskExecutionIds", ids).replace(":schemaTarget", target);
+            jdbcTemplate.query(sql, rs -> {
+                Long id = rs.getLong("TASK_EXECUTION_ID");
+                String ctrStatus = rs.getString("CTR_STATUS");
+                LOG.debug("populateCtrStatus:{}={}", id, ctrStatus);
+                AggregateTaskExecution execution = aggregateTaskExecutionMap.get(id);
+                Assert.notNull(execution, "Expected AggregateTaskExecution for " + id + " from " + ids);
+                updated.incrementAndGet();
+                execution.setCtrTaskStatus(ctrStatus);
+            });
+        }
+        LOG.debug("updated {} ctr statuses", updated);
+    }
+
+    @Override
+    public JobInstanceExecutions getJobInstanceExecution(String jobName, long instanceId) {
+        LOG.debug("getJobInstanceExecution:{}:{}:{}", jobName, instanceId, FIND_JOB_BY_NAME_INSTANCE_ID);
+        List<JobInstanceExecutions> executions = jdbcTemplate.query(FIND_JOB_BY_NAME_INSTANCE_ID, new JobInstanceExecutionsExtractor(true), jobName, instanceId);
+        if (executions == null || executions.isEmpty()) {
+            return null;
+        } else if (executions.size() > 1) {
+            throw new RuntimeException("Expected a single JobInstanceExecutions not " + executions.size());
+        }
+        return executions.get(0);
+    }
+
+    @Override
+    public JobInstanceExecutions getJobInstanceExecutions(long jobInstanceId, String schemaTarget) {
+        List<JobInstanceExecutions> executions = jdbcTemplate.query(FIND_JOB_BY_INSTANCE_ID_SCHEMA, new JobInstanceExecutionsExtractor(true), jobInstanceId, schemaTarget);
+        if (executions == null || executions.isEmpty()) {
+            return null;
+        } else if (executions.size() > 1) {
+            throw new RuntimeException("Expected a single JobInstanceExecutions not " + executions.size());
+        }
+        JobInstanceExecutions jobInstanceExecution = executions.get(0);
+        if (!ObjectUtils.isEmpty(jobInstanceExecution.getTaskJobExecutions())) {
+            jobInstanceExecution.getTaskJobExecutions().forEach((execution) ->
+                    jobServiceContainer.get(execution.getSchemaTarget()).addStepExecutions(execution.getJobExecution())
+            );
+        }
+        return jobInstanceExecution;
+    }
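Note on populateCtrStatus() above: the id list and schema target are substituted into FIND_CTR_STATUS as strings rather than bound as parameters (the ids are server-generated longs). An illustrative substitution, with hypothetical values:

    // ids 101,102 on schema target "boot2":
    String sql = FIND_CTR_STATUS.replace(":taskExecutionIds", "101,102").replace(":schemaTarget", "boot2");
    // -> "... WHERE T.TASK_EXECUTION_ID in (101,102) AND T.SCHEMA_TARGET = 'boot2' ..."
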
+    @Override
+    public JobInstance getJobInstance(long id, String schemaTarget) throws NoSuchJobInstanceException {
+        List<JobInstance> instances = jdbcTemplate.query(GET_JOB_INSTANCE_BY_ID, new JobInstanceExtractor(), id, schemaTarget);
+        if (ObjectUtils.isEmpty(instances)) {
+            throw new NoSuchJobInstanceException(String.format("JobInstance with id=%d does not exist", id));
+        } else if (instances.size() > 1) {
+            throw new NoSuchJobInstanceException(String.format("More than one Job Instance exists for ID %d ", id));
+        }
+        return instances.get(0);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutions(String jobName, BatchStatus status, Pageable pageable) throws NoSuchJobExecutionException {
+        int total = countJobExecutions(jobName, status);
+        List<TaskJobExecution> executions = getJobExecutions(jobName, status, getPageOffset(pageable), pageable.getPageSize());
+        Assert.isTrue(total >= executions.size(), () -> "Expected total at least " + executions.size() + " not " + total);
+        return new PageImpl<>(executions, pageable, total);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsBetween(Date fromDate, Date toDate, Pageable pageable) {
+        int total = countJobExecutionsByDate(fromDate, toDate);
+        List<TaskJobExecution> executions = total > 0
+                ? getTaskJobExecutionsByDate(fromDate, toDate, getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(executions, pageable, total);
+    }
+
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsWithSteps(Pageable pageable) {
+        int total = countJobExecutions();
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCount(getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsWithStepCount(Pageable pageable) {
+        int total = countJobExecutions();
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCount(getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(int jobInstanceId, String schemaTarget, Pageable pageable) {
+        int total = countJobExecutionsByInstanceId(jobInstanceId, schemaTarget);
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCountFilteredByJobInstanceId(jobInstanceId, schemaTarget, getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(int taskExecutionId, String schemaTarget, Pageable pageable) {
+        int total = countJobExecutionsByTaskExecutionId(taskExecutionId, schemaTarget);
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCountFilteredByTaskExecutionId(taskExecutionId, schemaTarget, getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
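All of these listing methods translate Spring Data paging into an offset/limit pair; a short sketch of the contract, with hypothetical values:

    // third page of 50 -> offset 100; getPageOffset(...) rejects offsets beyond Integer.MAX_VALUE
    Pageable pageable = PageRequest.of(2, 50);
    // a count query sets the Page total; the content query only runs when total > 0
    Page<TaskJobExecution> page = dao.listJobExecutionsBetween(from, to, pageable);
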
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsForJobWithStepCount(String jobName, Pageable pageable) throws NoSuchJobException {
+        int total = countJobExecutions(jobName);
+        if(total == 0) {
+            throw new NoSuchJobException("No Job with that name either current or historic: [" + jobName + "]");
+        }
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCount(jobName, getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public TaskJobExecution getJobExecution(long jobExecutionId, String schemaTarget) throws NoSuchJobExecutionException {
+        List<TaskJobExecution> jobExecutions = getJobExecutionPage(jobExecutionId, schemaTarget);
+        if (jobExecutions.isEmpty()) {
+            throw new NoSuchJobExecutionException(String.format("Job id %s for schema target %s not found", jobExecutionId, schemaTarget));
+        }
+        if (jobExecutions.size() > 1) {
+            LOG.debug("Too many job executions:{}", jobExecutions);
+            LOG.warn("Expected only 1 job for {}: not {}", jobExecutionId, jobExecutions.size());
+        }
+
+        TaskJobExecution taskJobExecution = jobExecutions.get(0);
+        JobService jobService = jobServiceContainer.get(taskJobExecution.getSchemaTarget());
+        jobService.addStepExecutions(taskJobExecution.getJobExecution());
+        return taskJobExecution;
+    }
+
+    private List<TaskJobExecution> getJobExecutionPage(long jobExecutionId, String schemaTarget) {
+        return queryForProvider(
+                byJobExecutionIdAndSchemaPagingQueryProvider,
+                new JobExecutionRowMapper(true),
+                0,
+                2,
+                jobExecutionId,
+                schemaTarget
+        );
+    }
+
+    private int countJobExecutions() {
+        LOG.debug("countJobExecutions:{}", GET_COUNT);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT, Integer.class);
+        return count != null ? count : 0;
+    }
+
+    private int countJobExecutionsByDate(Date fromDate, Date toDate) {
+        Assert.notNull(fromDate, "fromDate must not be null");
+        Assert.notNull(toDate, "toDate must not be null");
+        LOG.debug("countJobExecutionsByDate:{}:{}:{}", fromDate, toDate, GET_COUNT_BY_DATE);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_DATE, Integer.class, fromDate, toDate);
+        return count != null ? count : 0;
+    }
+
+    private int countJobExecutions(String jobName) {
+        LOG.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME, Integer.class, jobName);
+        return count != null ? count : 0;
+    }
+
+    private int countJobExecutions(BatchStatus status) {
+        LOG.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_STATUS, Integer.class, status.name());
+        return count != null ? count : 0;
+    }
+
+    private int countJobExecutions(String jobName, BatchStatus status) {
+        LOG.debug("countJobExecutions:{}:{}", jobName, status);
+        Integer count;
+        if (StringUtils.hasText(jobName) && status != null) {
+            LOG.debug("countJobExecutions:{}:{}:{}", jobName, status, GET_COUNT_BY_JOB_NAME_AND_STATUS);
+            count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME_AND_STATUS, Integer.class, jobName, status.name());
+        } else if (status != null) {
+            LOG.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS);
+            count = jdbcTemplate.queryForObject(GET_COUNT_BY_STATUS, Integer.class, status.name());
+        } else if (StringUtils.hasText(jobName)) {
+            LOG.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME);
+            count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME, Integer.class, jobName);
+        } else {
+            count = jdbcTemplate.queryForObject(GET_COUNT, Integer.class);
+        }
+        return count != null ?
count : 0;
+    }
+
+    private int countJobExecutionsByInstanceId(int jobInstanceId, String schemaTarget) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        LOG.debug("countJobExecutionsByInstanceId:{}:{}:{}", jobInstanceId, schemaTarget, GET_COUNT_BY_JOB_INSTANCE_ID);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_INSTANCE_ID, Integer.class, jobInstanceId, schemaTarget);
+        return count != null ? count : 0;
+    }
+
+    private int countJobExecutionsByTaskExecutionId(int taskExecutionId, String schemaTarget) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        LOG.debug("countJobExecutionsByTaskExecutionId:{}:{}:{}", taskExecutionId, schemaTarget, GET_COUNT_BY_TASK_EXECUTION_ID);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_TASK_EXECUTION_ID, Integer.class, taskExecutionId, schemaTarget);
+        return count != null ? count : 0;
+    }
+
+    private List<TaskJobExecution> getJobExecutionsWithStepCountFilteredByJobInstanceId(
+            int jobInstanceId,
+            String schemaTarget,
+            int start,
+            int count
+    ) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        return queryForProvider(
+                byJobInstanceIdWithStepCountPagingQueryProvider,
+                new JobExecutionRowMapper(true),
+                start,
+                count,
+                jobInstanceId,
+                schemaTarget
+        );
+    }
+
+    private List<TaskJobExecution> getJobExecutionsWithStepCountFilteredByTaskExecutionId(
+            int taskExecutionId,
+            String schemaTarget,
+            int start,
+            int count
+    ) {
+        if (!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        return queryForProvider(
+                byTaskExecutionIdWithStepCountPagingQueryProvider,
+                new JobExecutionRowMapper(true),
+                start,
+                count,
+                taskExecutionId,
+                schemaTarget
+        );
+    }
+
+    private List<TaskJobExecution> getJobExecutions(String jobName, BatchStatus status, int start, int count) throws NoSuchJobExecutionException {
+        if (StringUtils.hasText(jobName) && status != null) {
+            return queryForProvider(byJobNameAndStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName, status.name());
+        } else if (status != null) {
+            return queryForProvider(byStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, status.name());
+        } else if (StringUtils.hasText(jobName)) {
+            return queryForProvider(byJobNamePagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName);
+        }
+        return queryForProvider(allExecutionsPagingQueryProviderNoStepCount, new JobExecutionRowMapper(false), start, count);
+    }
+
+    private List<TaskJobExecution> getJobExecutionsWithStepCount(String jobName, int start, int count) {
+        return queryForProvider(byJobNameWithStepCountPagingQueryProvider, new JobExecutionRowMapper(true), start, count, jobName);
+    }
+
+    public List<TaskJobExecution> getJobExecutionsWithStepCount(int start, int count) {
+        return queryForProvider(allExecutionsPagingQueryProvider, new JobExecutionRowMapper(true), start, count);
+    }
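getJobParameters() below reconciles two parameter storage layouts; roughly, the rows it expects look like this (column values hypothetical):

    // boot2 (%PREFIX%JOB_EXECUTION_PARAMS): typed columns, selected by TYPE_CD
    //   KEY_NAME='run.id', TYPE_CD='LONG', LONG_VAL=42, IDENTIFYING='Y'
    // boot3: a string value plus its declared Java type, converted back through the
    // ConfigurableConversionService registered in the constructor
    //   PARAMETER_NAME='run.id', PARAMETER_TYPE='java.lang.Long', PARAMETER_VALUE='42', IDENTIFYING='Y'
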
rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + JobParameter value; + switch (type) { + case STRING: + value = new JobParameter(rs.getString("STRING_VAL"), identifying); + break; + case LONG: + long longValue = rs.getLong("LONG_VAL"); + value = new JobParameter(rs.wasNull() ? null : longValue, identifying); + break; + case DOUBLE: + double doubleValue = rs.getDouble("DOUBLE_VAL"); + value = new JobParameter(rs.wasNull() ? null : doubleValue, identifying); + break; + case DATE: + value = new JobParameter(rs.getTimestamp("DATE_VAL"), identifying); + break; + default: + LOG.error("Unknown type:{} for {}", type, keyName); + return; + } + map.put(keyName, value); + }; + } else { + handler = rs -> { + String parameterName = rs.getString("PARAMETER_NAME"); + Class parameterType = null; + try { + parameterType = Class.forName(rs.getString("PARAMETER_TYPE")); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + String stringValue = rs.getString("PARAMETER_VALUE"); + boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + Object typedValue = conversionService.convert(stringValue, parameterType); + JobParameter value; + if (typedValue instanceof String) { + value = new JobParameter((String) typedValue, identifying); + } else if (typedValue instanceof Date) { + value = new JobParameter((Date) typedValue, identifying); + } else if (typedValue instanceof Double) { + value = new JobParameter((Double) typedValue, identifying); + } else if (typedValue instanceof Long) { + value = new JobParameter((Long) typedValue, identifying); + } else if (typedValue instanceof Number) { + value = new JobParameter(((Number) typedValue).doubleValue(), identifying); + } else if (typedValue instanceof Instant) { + value = new JobParameter(new Date(((Instant) typedValue).toEpochMilli()), identifying); + } else { + + value = new JobParameter(typedValue != null ? typedValue.toString() : null, identifying); + } + map.put(parameterName, value); + }; + } + + jdbcTemplate.query( + getQuery( + boot2 ? FIND_PARAMS_FROM_ID2 : FIND_PARAMS_FROM_ID3, + schemaVersionTarget.getBatchPrefix() + ), + handler, + executionId + ); + return new JobParameters(map); + } + + private > List queryForProvider(P pagingQueryProvider, M mapper, int start, int count, Object... arguments) { + if (start <= 0) { + String sql = pagingQueryProvider.generateFirstPageQuery(count); + if (LOG.isDebugEnabled()) { + LOG.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); + } + return jdbcTemplate.query(sql, mapper, arguments); + } else { + try { + String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count); + if (LOG.isDebugEnabled()) { + LOG.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); + } + Long startValue; + startValue = jdbcTemplate.queryForObject(sqlJump, Long.class, arguments); + List args = new ArrayList<>(Arrays.asList(arguments)); + args.add(startValue); + String sql = pagingQueryProvider.generateRemainingPagesQuery(count); + if (LOG.isDebugEnabled()) { + LOG.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); + } + return jdbcTemplate.query(sql, mapper, args.toArray()); + } catch (IncorrectResultSizeDataAccessException x) { + return Collections.emptyList(); + } + } + } + + private >> List queryForProvider(P pagingQueryProvider, R extractor, int start, int count, Object... 
arguments) { + if (start <= 0) { + String sql = pagingQueryProvider.generateFirstPageQuery(count); + if (LOG.isDebugEnabled()) { + LOG.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); + } + return jdbcTemplate.query(sql, extractor, arguments); + } else { + String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count); + if (LOG.isDebugEnabled()) { + LOG.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); + } + Long startValue = jdbcTemplate.queryForObject(sqlJump, Long.class, arguments); + List args = new ArrayList<>(Arrays.asList(arguments)); + args.add(startValue); + String sql = pagingQueryProvider.generateRemainingPagesQuery(count); + if (LOG.isDebugEnabled()) { + LOG.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); + } + return jdbcTemplate.query(sql, extractor, args.toArray()); + } + } + + private List getTaskJobInstancesForJobName(String jobName, Pageable pageable) { + Assert.notNull(pageable, "pageable must not be null"); + Assert.notNull(jobName, "jobName must not be null"); + int start = getPageOffset(pageable); + int count = pageable.getPageSize(); + return queryForProvider(jobExecutionsPagingQueryProviderByName, new JobInstanceExecutionsExtractor(false), start, count, jobName); + } + + private TaskJobExecution createJobExecutionFromResultSet(ResultSet rs, int row, boolean readStepCount) throws SQLException { + long taskExecutionId = rs.getLong("TASK_EXECUTION_ID"); + Long jobExecutionId = rs.getLong("JOB_EXECUTION_ID"); + JobExecution jobExecution; + String schemaTarget = rs.getString("SCHEMA_TARGET"); + JobParameters jobParameters = getJobParameters(jobExecutionId, schemaTarget); + + JobInstance jobInstance = new JobInstance(rs.getLong("JOB_INSTANCE_ID"), rs.getString("JOB_NAME")); + jobExecution = new JobExecution(jobInstance, jobParameters); + jobExecution.setId(jobExecutionId); + + jobExecution.setStartTime(rs.getTimestamp("START_TIME")); + jobExecution.setEndTime(rs.getTimestamp("END_TIME")); + jobExecution.setStatus(BatchStatus.valueOf(rs.getString("STATUS"))); + jobExecution.setExitStatus(new ExitStatus(rs.getString("EXIT_CODE"), rs.getString("EXIT_MESSAGE"))); + jobExecution.setCreateTime(rs.getTimestamp("CREATE_TIME")); + jobExecution.setLastUpdated(rs.getTimestamp("LAST_UPDATED")); + jobExecution.setVersion(rs.getInt("VERSION")); + + return readStepCount ? 
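Both queryForProvider overloads implement the standard Spring Batch three-query paging pattern; the flow for a non-zero offset, sketched with hypothetical values (page of 20 starting at item 40):

    String jump = pagingQueryProvider.generateJumpToItemQuery(40, 20);     // fetches the sort-key value at the page boundary
    Long startValue = jdbcTemplate.queryForObject(jump, Long.class, args); // bound as the last argument below
    String rest = pagingQueryProvider.generateRemainingPagesQuery(20);     // adds a sort-key condition, e.g. "E.JOB_EXECUTION_ID < ?"
    // start <= 0 skips all of this and uses generateFirstPageQuery(20) directly
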
+    private TaskJobExecution createJobExecutionFromResultSet(ResultSet rs, int row, boolean readStepCount) throws SQLException {
+        long taskExecutionId = rs.getLong("TASK_EXECUTION_ID");
+        Long jobExecutionId = rs.getLong("JOB_EXECUTION_ID");
+        JobExecution jobExecution;
+        String schemaTarget = rs.getString("SCHEMA_TARGET");
+        JobParameters jobParameters = getJobParameters(jobExecutionId, schemaTarget);
+
+        JobInstance jobInstance = new JobInstance(rs.getLong("JOB_INSTANCE_ID"), rs.getString("JOB_NAME"));
+        jobExecution = new JobExecution(jobInstance, jobParameters);
+        jobExecution.setId(jobExecutionId);
+
+        jobExecution.setStartTime(rs.getTimestamp("START_TIME"));
+        jobExecution.setEndTime(rs.getTimestamp("END_TIME"));
+        jobExecution.setStatus(BatchStatus.valueOf(rs.getString("STATUS")));
+        jobExecution.setExitStatus(new ExitStatus(rs.getString("EXIT_CODE"), rs.getString("EXIT_MESSAGE")));
+        jobExecution.setCreateTime(rs.getTimestamp("CREATE_TIME"));
+        jobExecution.setLastUpdated(rs.getTimestamp("LAST_UPDATED"));
+        jobExecution.setVersion(rs.getInt("VERSION"));
+
+        return readStepCount ?
+                new TaskJobExecution(taskExecutionId, jobExecution, true, rs.getInt("STEP_COUNT"), schemaTarget) :
+                new TaskJobExecution(taskExecutionId, jobExecution, true, schemaTarget);
+    }
+
+    private List<TaskJobExecution> getTaskJobExecutionsByDate(Date startDate, Date endDate, int start, int count) {
+        return queryForProvider(
+                executionsByDateRangeWithStepCountPagingQueryProvider,
+                new JobExecutionRowMapper(true),
+                start,
+                count,
+                startDate,
+                endDate
+        );
+    }
+    private class JobInstanceExtractor implements ResultSetExtractor<List<JobInstance>> {
+
+        @Override
+        public List<JobInstance> extractData(ResultSet rs) throws SQLException,
+                DataAccessException {
+            List<JobInstance> jobInstances = new ArrayList<>();
+            while (rs.next()) {
+                jobInstances.add( new JobInstance(rs.getLong("JOB_INSTANCE_ID"), rs.getString("JOB_NAME")));
+            }
+            return jobInstances;
+        }
+    }
+
+    private class JobInstanceExecutionsExtractor implements ResultSetExtractor<List<JobInstanceExecutions>> {
+        final boolean readStepCount;
+
+        public JobInstanceExecutionsExtractor(boolean readStepCount) {
+            this.readStepCount = readStepCount;
+        }
+
+        @Override
+        public List<JobInstanceExecutions> extractData(ResultSet rs) throws SQLException,
+                DataAccessException {
+            final Map<Long, List<TaskJobExecution>> taskJobExecutions = new HashMap<>();
+            final Map<Long, JobInstance> jobInstances = new TreeMap<>();
+
+            while (rs.next()) {
+                Long id = rs.getLong("JOB_INSTANCE_ID");
+                JobInstance jobInstance;
+                if (!jobInstances.containsKey(id)) {
+                    jobInstance = new JobInstance(id, rs.getString("JOB_NAME"));
+                    jobInstances.put(id, jobInstance);
+                } else {
+                    jobInstance = jobInstances.get(id);
+                }
+                long taskId = rs.getLong("TASK_EXECUTION_ID");
+                if (!rs.wasNull()) {
+                    String schemaTarget = rs.getString("SCHEMA_TARGET");
+                    List<TaskJobExecution> executions = taskJobExecutions.computeIfAbsent(id, k -> new ArrayList<>());
+                    long jobExecutionId = rs.getLong("JOB_EXECUTION_ID");
+                    JobParameters jobParameters = getJobParameters(jobExecutionId, schemaTarget);
+                    JobExecution jobExecution = new JobExecution(jobInstance, jobExecutionId, jobParameters, null);
+
+                    int stepCount = readStepCount ? rs.getInt("STEP_COUNT") : 0;
+                    TaskJobExecution execution = new TaskJobExecution(taskId, jobExecution, true, stepCount, schemaTarget);
+                    executions.add(execution);
+                }
+            }
+            return jobInstances.values()
+                    .stream()
+                    .map(jobInstance -> new JobInstanceExecutions(jobInstance, taskJobExecutions.get(jobInstance.getInstanceId())))
+                    .collect(Collectors.toList());
+        }
+
+    }
+
+    class JobExecutionRowMapper implements RowMapper<TaskJobExecution> {
+        boolean readStepCount;
+
+        JobExecutionRowMapper(boolean readStepCount) {
+            this.readStepCount = readStepCount;
+        }
+
+        @Override
+        public TaskJobExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
+            return createJobExecutionFromResultSet(rs, rowNum, readStepCount);
+        }
+
+    }
+
+    protected String getQuery(String base, String tablePrefix) {
+        return StringUtils.replace(base, "%PREFIX%", tablePrefix);
+    }
+
+    private int getPageOffset(Pageable pageable) {
+        if (pageable.getOffset() > (long) Integer.MAX_VALUE) {
+            throw new OffsetOutOfBoundsException("The pageable offset requested for this query is greater than MAX_INT.");
+        }
+        return (int) pageable.getOffset();
+    }
+
+    /**
+     * @return a {@link PagingQueryProvider} for all job executions
+     * @throws Exception if page provider is not created.
+     */
+    private PagingQueryProvider getPagingQueryProvider() throws Exception {
+        return getPagingQueryProvider(null, null, null, Collections.emptyMap());
+    }
+
+    /**
+     * @return a {@link PagingQueryProvider} for all job executions with the
+     * provided where clause
+     * @throws Exception if page provider is not created.
+     */
+    private PagingQueryProvider getPagingQueryProvider(String whereClause) throws Exception {
+        return getPagingQueryProvider(null, null, whereClause, Collections.emptyMap());
+    }
+
+    /**
+     * @return a {@link PagingQueryProvider} with a where clause to narrow the
+     * query
+     * @throws Exception if page provider is not created.
+     */
+    private PagingQueryProvider getPagingQueryProvider(String fromClause, String whereClause) throws Exception {
+        return getPagingQueryProvider(null, fromClause, whereClause, Collections.emptyMap());
+    }
+
+    private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) throws Exception {
+        return getPagingQueryProvider(fields, fromClause, whereClause, Collections.emptyMap());
+    }
+
+    /**
+     * @return a {@link PagingQueryProvider} with a where clause to narrow the
+     * query
+     * @throws Exception if page provider is not created.
+     */
+    private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause, Map<String, Order> sortKeys) throws Exception {
+        SqlPagingQueryProviderFactoryBean factory = new SafeSqlPagingQueryProviderFactoryBean();
+        factory.setDataSource(dataSource);
+        fromClause = "AGGREGATE_JOB_INSTANCE I JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" + (fromClause == null ? "" : " " + fromClause);
+        factory.setFromClause(fromClause);
+        if (fields == null) {
+            fields = FIELDS;
+        }
+        if (fields.contains("E.JOB_EXECUTION_ID") && this.useRowNumberOptimization) {
+            Order order = sortKeys.get("E.JOB_EXECUTION_ID");
+            String orderString = (order == null || order == Order.DESCENDING) ? "DESC" : "ASC";
+            fields += ", ROW_NUMBER() OVER (PARTITION BY E.JOB_EXECUTION_ID ORDER BY E.JOB_EXECUTION_ID " + orderString + ") as RN";
+        }
+        factory.setSelectClause(fields);
+        if (sortKeys.isEmpty()) {
+            sortKeys = Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING);
+        }
+        factory.setSortKeys(sortKeys);
+        factory.setWhereClause(whereClause);
+        return factory.getObject();
+    }
+
+    private boolean determineSupportsRowNumberFunction(DataSource dataSource) {
+        try {
+            return DatabaseType.supportsRowNumberFunction(dataSource);
+        }
+        catch (Exception e) {
+            LOG.warn("Unable to determine if DB supports ROW_NUMBER() function (reason: " + e.getMessage() + ")", e);
+        }
+        return false;
+    }
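When the database supports a window function and the optimization is left enabled (see ROW_NUMBER_OPTIMIZATION_ENABLED_PROPERTY above, which can be set to false to opt out), getPagingQueryProvider(...) widens the select list; for the default descending sort this amounts to appending:

    // ..., ROW_NUMBER() OVER (PARTITION BY E.JOB_EXECUTION_ID ORDER BY E.JOB_EXECUTION_ID DESC) as RN
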
+    /**
+     * A {@link SqlPagingQueryProviderFactoryBean} specialization that overrides the {@code Oracle, MSSQL, and DB2}
+     * paging {@link SafeOraclePagingQueryProvider provider} with an implementation that properly handles sort aliases.
+     * <p>
+     * NOTE: nested within the aggregate DAO as this is the only place that needs this specialization.
+     */
+    static class SafeSqlPagingQueryProviderFactoryBean extends SqlPagingQueryProviderFactoryBean {
+
+        private DataSource dataSource;
+
+        @Override
+        public void setDataSource(DataSource dataSource) {
+            super.setDataSource(dataSource);
+            this.dataSource = dataSource;
+        }
+
+        @Override
+        public PagingQueryProvider getObject() throws Exception {
+            PagingQueryProvider provider = super.getObject();
+            if (provider instanceof OraclePagingQueryProvider) {
+                provider = new SafeOraclePagingQueryProvider((AbstractSqlPagingQueryProvider) provider, this.dataSource);
+            }
+            else if (provider instanceof SqlServerPagingQueryProvider) {
+                provider = new SafeSqlServerPagingQueryProvider((SqlServerPagingQueryProvider) provider, this.dataSource);
+            }
+            else if (provider instanceof Db2PagingQueryProvider) {
+                provider = new SafeDb2PagingQueryProvider((Db2PagingQueryProvider) provider, this.dataSource);
+            }
+            return provider;
+        }
+
+    }
+
+    /**
+     * A {@link AbstractSqlPagingQueryProvider paging provider} for {@code Oracle} that works around the fact that the
+     * Oracle provider in Spring Batch 4.x does not properly handle sort aliases when using nested {@code ROW_NUMBER}
+     * clauses.
+     */
+    static class SafeOraclePagingQueryProvider extends AbstractSqlPagingQueryProvider {
+
+        SafeOraclePagingQueryProvider(AbstractSqlPagingQueryProvider delegate, DataSource dataSource) {
+            // Have to use reflection to retrieve the provider fields
+            this.setFromClause(extractField(delegate, "fromClause", String.class));
+            this.setWhereClause(extractField(delegate, "whereClause", String.class));
+            this.setSortKeys(extractField(delegate, "sortKeys", Map.class));
+            this.setSelectClause(extractField(delegate, "selectClause", String.class));
+            this.setGroupClause(extractField(delegate, "groupClause", String.class));
+            try {
+                this.init(dataSource);
+            }
+            catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        private <T> T extractField(AbstractSqlPagingQueryProvider target, String fieldName, Class<T> fieldType) {
+            Field field = ReflectionUtils.findField(AbstractSqlPagingQueryProvider.class, fieldName, fieldType);
+            ReflectionUtils.makeAccessible(field);
+            return (T) ReflectionUtils.getField(field, target);
+        }
+
+        @Override
+        public String generateFirstPageQuery(int pageSize) {
+            return generateRowNumSqlQuery(false, pageSize);
+        }
+
+        @Override
+        public String generateRemainingPagesQuery(int pageSize) {
+            return generateRowNumSqlQuery(true, pageSize);
+        }
+
+        @Override
+        public String generateJumpToItemQuery(int itemIndex, int pageSize) {
+            int page = itemIndex / pageSize;
+            int offset = (page * pageSize);
+            offset = (offset == 0) ? 1 : offset;
+            String sortKeyInnerSelect = this.getSortKeySelect(true);
+            String sortKeyOuterSelect = this.getSortKeySelect(false);
+            return SqlPagingQueryUtils.generateRowNumSqlQueryWithNesting(this, sortKeyInnerSelect, sortKeyOuterSelect,
+                    false, "TMP_ROW_NUM = " + offset);
+        }
+
+        private String getSortKeySelect(boolean withAliases) {
+            StringBuilder sql = new StringBuilder();
+            Map<String, Order> sortKeys = (withAliases) ? this.getSortKeys() : this.getSortKeysWithoutAliases();
+            sql.append(sortKeys.keySet().stream().collect(Collectors.joining(",")));
+            return sql.toString();
+        }
+
+        // Taken from SqlPagingQueryUtils.generateRowNumSqlQuery but use sortKeysWithoutAlias
+        // for outer sort condition.
+        private String generateRowNumSqlQuery(boolean remainingPageQuery, int pageSize) {
+            StringBuilder sql = new StringBuilder();
+            sql.append("SELECT * FROM (SELECT ").append(getSelectClause());
+            sql.append(" FROM ").append(this.getFromClause());
+            if (StringUtils.hasText(this.getWhereClause())) {
+                sql.append(" WHERE ").append(this.getWhereClause());
+            }
+            if (StringUtils.hasText(this.getGroupClause())) {
+                sql.append(" GROUP BY ").append(this.getGroupClause());
+            }
+            // inner sort by
+            sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this));
+            sql.append(") WHERE ").append("ROWNUM <= " + pageSize);
+            if (remainingPageQuery) {
+                sql.append(" AND ");
+                // For the outer sort we want to use sort keys w/o aliases. However,
+                // SqlPagingQueryUtils.buildSortConditions does not allow sort keys to be passed in.
+                // Therefore, we temporarily set the 'sortKeys' for the call to 'buildSortConditions'.
+                // The alternative is to clone the 'buildSortConditions' method here and allow the sort keys to be
+                // passed in BUT method is gigantic and this approach is the lesser of the two evils.
+                Map<String, Order> originalSortKeys = this.getSortKeys();
+                this.setSortKeys(this.getSortKeysWithoutAliases());
+                try {
+                    SqlPagingQueryUtils.buildSortConditions(this, sql);
+                }
+                finally {
+                    this.setSortKeys(originalSortKeys);
+                }
+            }
+            return sql.toString();
+        }
+    }
+
+    /**
+     * A {@link SqlServerPagingQueryProvider paging provider} for {@code MSSQL} that works around the fact that the
+     * MSSQL provider in Spring Batch 4.x does not properly handle sort aliases when generating jump to page queries.
+     */
+    static class SafeSqlServerPagingQueryProvider extends SqlServerPagingQueryProvider {
+
+        SafeSqlServerPagingQueryProvider(SqlServerPagingQueryProvider delegate, DataSource dataSource) {
+            // Have to use reflection to retrieve the provider fields
+            this.setFromClause(extractField(delegate, "fromClause", String.class));
+            this.setWhereClause(extractField(delegate, "whereClause", String.class));
+            this.setSortKeys(extractField(delegate, "sortKeys", Map.class));
+            this.setSelectClause(extractField(delegate, "selectClause", String.class));
+            this.setGroupClause(extractField(delegate, "groupClause", String.class));
+            try {
+                this.init(dataSource);
+            }
+            catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        private <T> T extractField(AbstractSqlPagingQueryProvider target, String fieldName, Class<T> fieldType) {
+            Field field = ReflectionUtils.findField(AbstractSqlPagingQueryProvider.class, fieldName, fieldType);
+            ReflectionUtils.makeAccessible(field);
+            return (T) ReflectionUtils.getField(field, target);
+        }
+
+        @Override
+        protected String getOverClause() {
+            // Overrides the parent impl to use 'getSortKeys' instead of 'getSortKeysWithoutAliases'
+            StringBuilder sql = new StringBuilder();
+            sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this.getSortKeys()));
+            return sql.toString();
+        }
+
+    }
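For reference, the shape of the Oracle query built by generateRowNumSqlQuery(...) above, with hypothetical clauses (select ID, from FOO, sort ID descending, page size 10):

    // first page:      SELECT * FROM (SELECT ID FROM FOO ORDER BY ID DESC) WHERE ROWNUM <= 10
    // remaining pages: the same query with " AND ID < ?" appended, where the outer
    // condition deliberately uses the un-aliased sort key via getSortKeysWithoutAliases()
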
+    /**
+     * A {@link Db2PagingQueryProvider paging provider} for {@code DB2} that works around the fact that the
+     * DB2 provider in Spring Batch 4.x does not properly handle sort aliases when generating jump to page queries.
+     */
+    static class SafeDb2PagingQueryProvider extends Db2PagingQueryProvider {
+
+        SafeDb2PagingQueryProvider(Db2PagingQueryProvider delegate, DataSource dataSource) {
+            // Have to use reflection to retrieve the provider fields
+            this.setFromClause(extractField(delegate, "fromClause", String.class));
+            this.setWhereClause(extractField(delegate, "whereClause", String.class));
+            this.setSortKeys(extractField(delegate, "sortKeys", Map.class));
+            this.setSelectClause(extractField(delegate, "selectClause", String.class));
+            this.setGroupClause(extractField(delegate, "groupClause", String.class));
+            try {
+                this.init(dataSource);
+            }
+            catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        private <T> T extractField(AbstractSqlPagingQueryProvider target, String fieldName, Class<T> fieldType) {
+            Field field = ReflectionUtils.findField(AbstractSqlPagingQueryProvider.class, fieldName, fieldType);
+            ReflectionUtils.makeAccessible(field);
+            return (T) ReflectionUtils.getField(field, target);
+        }
+
+        @Override
+        protected String getOverClause() {
+            // Overrides the parent impl to use 'getSortKeys' instead of 'getSortKeysWithoutAliases'
+            StringBuilder sql = new StringBuilder();
+            sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this.getSortKeys()));
+            return sql.toString();
+        }
+
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java
index bd05d51e59..4aecb3b514 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java
@@ -38,6 +38,7 @@
  * be migrated to Spring Batch itself eventually.
* * @author Gunnar Hillert + * @author Corneil du Plessis */ public class JdbcDataflowJobExecutionDao implements DataflowJobExecutionDao { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java index 02c9de1537..35a374eaf8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java @@ -16,19 +16,21 @@ package org.springframework.cloud.dataflow.server.repository; -import java.sql.ResultSet; -import java.sql.SQLException; import java.sql.Types; import java.util.Collections; +import java.util.HashSet; import java.util.Set; import java.util.TreeSet; import javax.sql.DataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.ResultSetExtractor; import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.util.Assert; @@ -42,22 +44,23 @@ * @author Gunnar Hillert * @author Glenn Renfro * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class JdbcDataflowTaskExecutionDao implements DataflowTaskExecutionDao { - + private final static Logger logger = LoggerFactory.getLogger(JdbcDataflowTaskExecutionDao.class); private final NamedParameterJdbcTemplate jdbcTemplate; private static final String DELETE_TASK_EXECUTIONS = "DELETE FROM %PREFIX%EXECUTION " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; private static final String DELETE_TASK_EXECUTION_PARAMS = "DELETE FROM %PREFIX%EXECUTION_PARAMS " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; private static final String DELETE_TASK_TASK_BATCH = "DELETE FROM %PREFIX%TASK_BATCH " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; - private static final String SELECT_CHILD_TASK_EXECUTION_IDS = "SELECT task_execution_id FROM %PREFIX%EXECUTION " - + "WHERE parent_execution_id in (:parentTaskExecutionIds)"; + private static final String SELECT_CHILD_TASK_EXECUTION_IDS = "SELECT TASK_EXECUTION_ID FROM %PREFIX%EXECUTION " + + "WHERE PARENT_EXECUTION_ID in (:parentTaskExecutionIds)"; private static final String FIND_TASK_EXECUTION_IDS_BY_TASK_NAME = "SELECT TASK_EXECUTION_ID " + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; @@ -87,7 +90,7 @@ public class JdbcDataflowTaskExecutionDao implements DataflowTaskExecutionDao { + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; - private TaskProperties taskProperties; + private final TaskProperties taskProperties; /** * @param dataSource used by the dao to execute queries and updates the tables. 
@@ -104,7 +107,7 @@ public JdbcDataflowTaskExecutionDao(DataSource dataSource, TaskProperties taskPr public int deleteTaskExecutionsByTaskExecutionIds(Set<Long> taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_EXECUTIONS); + final String query = SchemaUtilities.getQuery(DELETE_TASK_EXECUTIONS, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } @@ -112,7 +115,7 @@ public int deleteTaskExecutionsByTaskExecutionIds(Set<Long> taskExecutionIds) { public int deleteTaskExecutionParamsByTaskExecutionIds(Set<Long> taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_EXECUTION_PARAMS); + final String query = SchemaUtilities.getQuery(DELETE_TASK_EXECUTION_PARAMS, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } @@ -120,27 +123,24 @@ public int deleteTaskExecutionParamsByTaskExecutionIds(Set<Long> taskExecutionId { public int deleteTaskTaskBatchRelationshipsByTaskExecutionIds(Set<Long> taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_TASK_BATCH); + final String query = SchemaUtilities.getQuery(DELETE_TASK_TASK_BATCH, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } - private String getQuery(String base) { - return StringUtils.replace(base, "%PREFIX%", this.taskProperties.getTablePrefix()); - } + @Override public Set<Long> findChildTaskExecutionIds(Set<Long> taskExecutionIds) { + logger.debug("findChildTaskExecutionIds:{}", taskExecutionIds); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("parentTaskExecutionIds", taskExecutionIds); Set<Long> childTaskExecutionIds; try { childTaskExecutionIds = this.jdbcTemplate.query( - getQuery(SELECT_CHILD_TASK_EXECUTION_IDS), queryParameters, - new ResultSetExtractor<Set<Long>>() { - @Override - public Set<Long> extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + SchemaUtilities.getQuery(SELECT_CHILD_TASK_EXECUTION_IDS, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set<Long> jobExecutionIds = new TreeSet<>(); @@ -150,19 +150,21 @@ public Set<Long> extractData(ResultSet resultSet) } return jobExecutionIds; - } }); + Assert.notNull(childTaskExecutionIds, "Expected childTaskExecutionIds"); } catch (DataAccessException e) { childTaskExecutionIds = Collections.emptySet(); } - if (!childTaskExecutionIds.isEmpty()) { - childTaskExecutionIds.addAll(this.findChildTaskExecutionIds(childTaskExecutionIds)); + Set<Long> newChildren = new HashSet<>(childTaskExecutionIds); + newChildren.removeAll(taskExecutionIds); + if(!newChildren.isEmpty()) { + childTaskExecutionIds.addAll(this.findChildTaskExecutionIds(newChildren)); + } } - + logger.debug("findChildTaskExecutionIds:childTaskExecutionIds={}", childTaskExecutionIds); return childTaskExecutionIds; - } @Override @@ -171,11 +173,10 @@ public Set<Long> getTaskExecutionIdsByTaskName(String taskName) { .addValue("taskName", taskName, Types.VARCHAR); try { - return this.jdbcTemplate.query(getQuery(FIND_TASK_EXECUTION_IDS_BY_TASK_NAME), - queryParameters, new ResultSetExtractor<Set<Long>>() { - @Override - public Set<Long> extractData(ResultSet resultSet) - throws SQLException, DataAccessException { +
return this.jdbcTemplate.query( + SchemaUtilities.getQuery(FIND_TASK_EXECUTION_IDS_BY_TASK_NAME, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set<Long> taskExecutionIds = new TreeSet<>(); while (resultSet.next()) { @@ -183,7 +184,6 @@ public Set<Long> extractData(ResultSet resultSet) .add(resultSet.getLong("TASK_EXECUTION_ID")); } return taskExecutionIds; - } }); } catch (DataAccessException e) { @@ -193,60 +193,54 @@ public Set<Long> extractData(ResultSet resultSet) @Override public Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName) { - String QUERY = null; - MapSqlParameterSource queryParameters = null; + String QUERY; + MapSqlParameterSource queryParameters = new MapSqlParameterSource(); if (StringUtils.hasText(taskName)) { - queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName, Types.VARCHAR); + queryParameters.addValue("taskName", taskName, Types.VARCHAR); QUERY = (onlyCompleted) ? GET_COMPLETED_TASK_EXECUTIONS_COUNT_BY_TASK_NAME : GET_ALL_TASK_EXECUTIONS_COUNT_BY_TASK_NAME; } else { QUERY = (onlyCompleted) ? GET_COMPLETED_TASK_EXECUTIONS_COUNT: GET_ALL_TASK_EXECUTIONS_COUNT; } try { - return this.jdbcTemplate.query(getQuery(QUERY), - queryParameters, new ResultSetExtractor<Integer>() { - @Override - public Integer extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(QUERY, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { if (resultSet.next()) { return resultSet.getInt("count"); } - return Integer.valueOf(0); - } + return 0; }); } catch (DataAccessException e) { - return Integer.valueOf(0); + return 0; } } @Override public Set<Long> getAllTaskExecutionIds(boolean onlyCompleted, String taskName) { - String QUERY = null; - MapSqlParameterSource queryParameters = null; + String QUERY; + MapSqlParameterSource queryParameters = new MapSqlParameterSource(); if (StringUtils.hasText(taskName)) { - queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName, Types.VARCHAR); + queryParameters.addValue("taskName", taskName, Types.VARCHAR); QUERY = (onlyCompleted) ? FIND_ALL_COMPLETED_TASK_EXECUTION_IDS_BY_TASK_NAME : FIND_ALL_TASK_EXECUTION_IDS_BY_TASK_NAME; } else { QUERY = (onlyCompleted) ?
FIND_ALL_COMPLETED_TASK_EXECUTION_IDS : FIND_ALL_TASK_EXECUTION_IDS; } try { - return this.jdbcTemplate.query(getQuery(QUERY), queryParameters, new ResultSetExtractor<Set<Long>>() { - @Override - public Set<Long> extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(QUERY, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set<Long> taskExecutionIds = new TreeSet<>(); while (resultSet.next()) { - taskExecutionIds - .add(resultSet.getLong("TASK_EXECUTION_ID")); + taskExecutionIds.add(resultSet.getLong("TASK_EXECUTION_ID")); } return taskExecutionIds; - } }); } catch (DataAccessException e) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java index 878875c93d..4acbef3396 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java @@ -16,16 +16,18 @@ package org.springframework.cloud.dataflow.server.repository; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; - import javax.sql.DataSource; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.module.SimpleModule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; @@ -33,6 +35,7 @@ import org.springframework.cloud.dataflow.server.repository.support.AppDeploymentRequestMixin; import org.springframework.cloud.dataflow.server.repository.support.Order; import org.springframework.cloud.dataflow.server.repository.support.PagingQueryProvider; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.dataflow.server.repository.support.SqlPagingQueryProviderFactoryBean; import org.springframework.cloud.dataflow.server.service.impl.ResourceDeserializer; import org.springframework.cloud.dataflow.server.service.impl.ResourceMixin; @@ -52,23 +55,24 @@ * JDBC implementation for the {@code DataflowTaskExecutionMetadataDao} * * @author Michael Minella - * @since 2.3 + * @author Corneil du Plessis * @see DataflowTaskExecutionMetadataDao + * @since 2.3 */ public class JdbcDataflowTaskExecutionMetadataDao implements DataflowTaskExecutionMetadataDao { + private final static Logger logger = LoggerFactory.getLogger(JdbcDataflowTaskExecutionMetadataDao.class); - private static final String INSERT_SQL = "INSERT INTO task_execution_metadata (id, task_execution_id, " + - "task_execution_manifest) VALUES (:id, :taskExecutionId, :taskExecutionManifest)"; + private static final String INSERT_SQL = "INSERT INTO %PREFIX%EXECUTION_METADATA (ID, TASK_EXECUTION_ID, " + + "TASK_EXECUTION_MANIFEST) VALUES (:id, :taskExecutionId, :taskExecutionManifest)"; - private static final String FIND_MANIFEST_BY_TASK_EXECUTION_ID = "select m.task_execution_manifest as
task_execution_manifest " + - "from task_execution_metadata m inner join " + - "TASK_EXECUTION e on m.task_execution_id = e.TASK_EXECUTION_ID " + - "where e.TASK_EXECUTION_ID = :taskExecutionId"; + private static final String FIND_MANIFEST_BY_TASK_EXECUTION_ID = "SELECT M.TASK_EXECUTION_MANIFEST AS TASK_EXECUTION_MANIFEST " + + "FROM %PREFIX%EXECUTION_METADATA M INNER JOIN " + + "%PREFIX%EXECUTION E ON M.TASK_EXECUTION_ID = E.TASK_EXECUTION_ID " + + "WHERE E.TASK_EXECUTION_ID = :taskExecutionId"; + private static final String FIND_MANIFEST_BY_TASK_EXECUTION_IDS = "SELECT M.TASK_EXECUTION_MANIFEST AS TASK_EXECUTION_MANIFEST, M.TASK_EXECUTION_ID AS TASK_EXECUTION_ID " + + "FROM %PREFIX%EXECUTION_METADATA M WHERE M.TASK_EXECUTION_ID in (:taskExecutionIds)"; - private static final String DELETE_MANIFEST_BY_TASK_EXECUTION_IDS = - "DELETE FROM task_execution_metadata " + - "WHERE task_execution_id " + - "IN (:taskExecutionIds)"; + private static final String DELETE_MANIFEST_BY_TASK_EXECUTION_IDS = "DELETE FROM %PREFIX%EXECUTION_METADATA WHERE TASK_EXECUTION_ID IN (:taskExecutionIds)"; private final NamedParameterJdbcTemplate jdbcTemplate; @@ -78,9 +82,14 @@ public class JdbcDataflowTaskExecutionMetadataDao implements DataflowTaskExecuti private final DataSource dataSource; - public JdbcDataflowTaskExecutionMetadataDao(DataSource dataSource, - DataFieldMaxValueIncrementer incrementer) { + private final String tablePrefix; + public JdbcDataflowTaskExecutionMetadataDao( + DataSource dataSource, + DataFieldMaxValueIncrementer incrementer, + String prefix + ) { + this.tablePrefix = prefix; this.incrementer = incrementer; this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource); @@ -88,7 +97,7 @@ public JdbcDataflowTaskExecutionMetadataDao(DataSource dataSource, this.objectMapper = new ObjectMapper(); SimpleModule module = new SimpleModule(); module.addDeserializer(Resource.class, - new ResourceDeserializer(new AppResourceCommon(new MavenProperties(), new DefaultResourceLoader()))); + new ResourceDeserializer(new AppResourceCommon(new MavenProperties(), new DefaultResourceLoader()))); this.objectMapper.registerModule(module); this.objectMapper.addMixIn(Resource.class, ResourceMixin.class); this.objectMapper.addMixIn(AppDefinition.class, AppDefinitionMixin.class); @@ -104,13 +113,14 @@ public void save(TaskExecution taskExecution, TaskManifest manifest) { final String manifestJson = this.objectMapper.writeValueAsString(manifest); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("id", incrementer.nextLongValue()) - .addValue("taskExecutionId", taskExecution.getExecutionId()) - .addValue("taskExecutionManifest", manifestJson); - - this.jdbcTemplate.update(INSERT_SQL, queryParameters); - } - catch (JsonProcessingException e) { + .addValue("id", incrementer.nextLongValue()) + .addValue("taskExecutionId", taskExecution.getExecutionId()) + .addValue("taskExecutionManifest", manifestJson); + + String sql = SchemaUtilities.getQuery(INSERT_SQL, tablePrefix); + logger.debug("save:sql={}, parameters={}", sql, queryParameters.getValues()); + this.jdbcTemplate.update(sql, queryParameters); + } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to serialize manifest", e); } } @@ -118,14 +128,16 @@ public void save(TaskExecution taskExecution, TaskManifest manifest) { @Override public TaskManifest getLatestManifest(String taskName) { Map sortKeys = new HashMap<>(1); - sortKeys.put("e.TASK_EXECUTION_ID", Order.DESCENDING); + 
sortKeys.put("E.TASK_EXECUTION_ID", Order.DESCENDING); SqlPagingQueryProviderFactoryBean sqlPagingQueryProviderFactoryBean = new SqlPagingQueryProviderFactoryBean(); sqlPagingQueryProviderFactoryBean.setDataSource(this.dataSource); - sqlPagingQueryProviderFactoryBean.setSelectClause("task_execution_manifest"); - sqlPagingQueryProviderFactoryBean.setFromClause("task_execution_metadata m inner join TASK_EXECUTION e on m.task_execution_id = e.TASK_EXECUTION_ID"); - sqlPagingQueryProviderFactoryBean.setWhereClause("e.TASK_NAME = :taskName"); + sqlPagingQueryProviderFactoryBean.setSelectClause("TASK_EXECUTION_MANIFEST"); + sqlPagingQueryProviderFactoryBean.setFromClause(SchemaUtilities.getQuery( + "%PREFIX%EXECUTION_METADATA M INNER JOIN %PREFIX%EXECUTION E ON M.TASK_EXECUTION_ID = E.TASK_EXECUTION_ID", + tablePrefix)); + sqlPagingQueryProviderFactoryBean.setWhereClause("E.TASK_NAME = :taskName"); sqlPagingQueryProviderFactoryBean.setSortKeys(sortKeys); try { @@ -134,22 +146,20 @@ public TaskManifest getLatestManifest(String taskName) { queryProvider.init(this.dataSource); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName); - - return this.jdbcTemplate.queryForObject(queryProvider.getPageQuery(PageRequest.of(0, 1)), - queryParameters, (resultSet, i) -> { - try { - return objectMapper.readValue(resultSet.getString("task_execution_manifest"), TaskManifest.class); - } - catch (IOException e) { - throw new IllegalArgumentException("Unable to deserialize manifest", e); - } - }); - } - catch (EmptyResultDataAccessException erdae) { + .addValue("taskName", taskName); + + String sql = queryProvider.getPageQuery(PageRequest.of(0, 1)); + logger.debug("getLatestManifest:sql={},parameters={}", sql, queryParameters.getValues()); + return this.jdbcTemplate.queryForObject(sql, queryParameters, (resultSet, i) -> { + try { + return objectMapper.readValue(resultSet.getString("TASK_EXECUTION_MANIFEST"), TaskManifest.class); + } catch (IOException e) { + throw new IllegalArgumentException("Unable to deserialize manifest", e); + } + }); + } catch (EmptyResultDataAccessException erdae) { return null; - } - catch (Exception e) { + } catch (Exception e) { throw new IllegalStateException("Unable to generate query", e); } } @@ -157,29 +167,56 @@ public TaskManifest getLatestManifest(String taskName) { @Override public TaskManifest findManifestById(Long id) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("taskExecutionId", id); + .addValue("taskExecutionId", id); try { - return this.jdbcTemplate.queryForObject(FIND_MANIFEST_BY_TASK_EXECUTION_ID, - queryParameters, - (resultSet, i) -> { - try { - return objectMapper.readValue(resultSet.getString("task_execution_manifest"), TaskManifest.class); - } - catch (IOException e) { - throw new IllegalArgumentException("Unable to deserialize manifest", e); - } - }); - } - catch (EmptyResultDataAccessException erdae) { + String sql = SchemaUtilities.getQuery(FIND_MANIFEST_BY_TASK_EXECUTION_ID, tablePrefix); + logger.debug("findManifestById:sql={}, parameters={}", sql, queryParameters); + return this.jdbcTemplate.queryForObject(sql, queryParameters, (resultSet, i) -> { + try { + return objectMapper.readValue(resultSet.getString("TASK_EXECUTION_MANIFEST"), TaskManifest.class); + } catch (IOException e) { + throw new IllegalArgumentException("Unable to deserialize manifest", e); + } + }); + } catch (EmptyResultDataAccessException erdae) { return null; } } + @Override + public Map 
findManifestByIds(Set ids) { + final MapSqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionIds", ids); + + try { + String sql = SchemaUtilities.getQuery(FIND_MANIFEST_BY_TASK_EXECUTION_IDS, tablePrefix); + logger.debug("findManifestByIds:sql={}, parameters={}", sql, queryParameters); + final Map result = new HashMap<>(); + this.jdbcTemplate.query(sql, queryParameters, rs -> { + try { + String executionManifest = rs.getString("TASK_EXECUTION_MANIFEST"); + if(executionManifest != null && !executionManifest.trim().isEmpty()) { + result.put(rs.getLong("TASK_EXECUTION_ID"), + objectMapper.readValue(executionManifest, TaskManifest.class)); + } + } + catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }); + return result; + } catch (EmptyResultDataAccessException erdae) { + return Collections.emptyMap(); + } + } + @Override public int deleteManifestsByTaskExecutionIds(Set taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("taskExecutionIds", taskExecutionIds); - return this.jdbcTemplate.update(DELETE_MANIFEST_BY_TASK_EXECUTION_IDS, queryParameters); + .addValue("taskExecutionIds", taskExecutionIds); + String sql = SchemaUtilities.getQuery(DELETE_MANIFEST_BY_TASK_EXECUTION_IDS, tablePrefix); + logger.debug("deleteManifestsByTaskExecutionIds:sql={}, parameters={}", sql, queryParameters); + return this.jdbcTemplate.update(sql, queryParameters); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java new file mode 100644 index 0000000000..4876834e69 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository; + +import java.util.HashMap; +import java.util.Map; +import javax.sql.DataSource; + +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.batch.BatchVersion; +import org.springframework.cloud.dataflow.server.batch.JdbcSearchableJobExecutionDao; +import org.springframework.cloud.dataflow.server.batch.SearchableJobExecutionDao; +import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.util.StringUtils; + +/** + * Provides a container of {@link SearchableJobExecutionDao} for each schema target. 
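// A usage sketch for the per-schema-target container pattern introduced below;
// the "boot2" target name is illustrative.
SearchableJobExecutionDao dao = jobExecutionDaoContainer.get("boot2");
// Passing null (or blank) falls back to SchemaVersionTarget.defaultTarget()
SearchableJobExecutionDao defaultDao = jobExecutionDaoContainer.get(null);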
+ * @author Corneil du Plessis + */ +public class JobExecutionDaoContainer { + private final Map container = new HashMap<>(); + + public JobExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + BatchVersion batchVersion = BatchVersion.from(target); + JdbcSearchableJobExecutionDao jdbcSearchableJobExecutionDao = new JdbcSearchableJobExecutionDao(batchVersion); + jdbcSearchableJobExecutionDao.setDataSource(dataSource); + jdbcSearchableJobExecutionDao.setTablePrefix(target.getBatchPrefix()); + try { + jdbcSearchableJobExecutionDao.afterPropertiesSet(); + container.put(target.getName(), jdbcSearchableJobExecutionDao); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JdbcSearchableJobExecutionDao from:" + target.getName(), x); + } + } + } + + public SearchableJobExecutionDao get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java new file mode 100644 index 0000000000..c3914de4b1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.repository; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.util.StringUtils; + +public class JobRepositoryContainer { + private final Map container = new HashMap<>(); + + public JobRepositoryContainer(DataSource dataSource, PlatformTransactionManager transactionManager, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + JobRepositoryFactoryBean factoryBean = new JobRepositoryFactoryBean(); + factoryBean.setDataSource(dataSource); + factoryBean.setTablePrefix(target.getBatchPrefix()); + factoryBean.setTransactionManager(transactionManager); + + try { + factoryBean.afterPropertiesSet(); + container.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JobRepository for:" + target.getName(), x); + } + } + } + + public JobRepository get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java index f7ed7b657a..52df3e674e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java @@ -36,8 +36,11 @@ public class NoSuchTaskExecutionException extends RuntimeException { * * @param id the id of the {@link TaskExecution} that could not be found */ - public NoSuchTaskExecutionException(long id) { - super("Could not find TaskExecution with id " + id); + public NoSuchTaskExecutionException(long id, String schemaTarget) { + super("Could not find TaskExecution with id " + id + " for schema target " + schemaTarget); + } + public NoSuchTaskExecutionException(String externalExecutionId, String platform) { + super("Could not find TaskExecution with id " + externalExecutionId + " for platform " + platform); } /** @@ -45,7 +48,7 @@ public NoSuchTaskExecutionException(long id) { * * @param ids the ids of the {@link TaskExecution} that could not be found */ - public NoSuchTaskExecutionException(Set ids) { - super("Could not find TaskExecutions with the following ids: " + StringUtils.collectionToDelimitedString(ids, ", ")); + public NoSuchTaskExecutionException(Set ids, String schemaTarget) { + super("Could not find TaskExecutions for schema target " + schemaTarget + " with the following ids: " + StringUtils.collectionToDelimitedString(ids, ", ")); } } diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java new file mode 100644 index 0000000000..14c38db946 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java @@ -0,0 +1,47 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.batch.listener.TaskBatchDao; +import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; +import org.springframework.util.StringUtils; + +/** + * Provides a container of {@link TaskBatchDao} for each schema target + * @author Corneil du Plessis + */ +public class TaskBatchDaoContainer { + private final Map taskBatchDaoContainer = new HashMap<>(); + + public TaskBatchDaoContainer(DataSource dataSource, SchemaService schemaService) { + for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + taskBatchDaoContainer.put(target.getName(), new JdbcTaskBatchDao(dataSource, target.getTaskPrefix())); + } + } + public TaskBatchDao get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + return taskBatchDaoContainer.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java index b4c348f1f7..d67268bef5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java @@ -32,14 +32,19 @@ public interface TaskDefinitionRepository extends KeyValueRepository findByTaskNameContains(String taskName, Pageable pageable); - Page findByTaskNameContainsAndDslTextContains(String taskName, String dslText, Pageable pageable); + Page findByDescriptionContains(String description, Pageable pageable); Page findByDslTextContains(String dslText, Pageable pageable); + @Deprecated + Page findByTaskNameContainsAndDslTextContains(String taskName, String dslText, Pageable pageable); + /** * Performs a findByName query and throws an exception if the name is not found. 
* @param name the name of the task definition * @return The task definition instance or {@link NoSuchTaskDefinitionException} if not found. */ TaskDefinition findByTaskName(String name); + + long countByTaskName(String taskName); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDeploymentRepository.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDeploymentRepository.java index fb8c25609f..53fa9454fb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDeploymentRepository.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDeploymentRepository.java @@ -15,6 +15,8 @@ */ package org.springframework.cloud.dataflow.server.repository; +import java.util.List; + import org.springframework.cloud.dataflow.core.TaskDeployment; import org.springframework.data.keyvalue.repository.KeyValueRepository; import org.springframework.transaction.annotation.Transactional; @@ -27,5 +29,7 @@ public interface TaskDeploymentRepository extends KeyValueRepository taskExecutionDaoContainer = new HashMap<>(); + + public TaskExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + TaskExecutionDaoFactoryBean factoryBean = new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, target.getTaskPrefix()); + try { + this.taskExecutionDaoContainer.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating TaskExecutionDao for " + target.getName(), x); + } + } + } + + public TaskExecutionDao get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + return taskExecutionDaoContainer.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java index 48b2268ceb..79b604482a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,14 +22,15 @@ * H2 implementation of a {@link PagingQueryProvider} using database specific features. 
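// A worked example of the paging-clause change made just below: for
// PageRequest.of(2, 10) (offset 20, page size 10) H2 now receives the standard
//   OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY
// clause instead of the former H2-specific "LIMIT 20 10" form.
Pageable pageable = PageRequest.of(2, 10);
String limitClause = "OFFSET " + pageable.getOffset() + " ROWS FETCH NEXT "
		+ pageable.getPageSize() + " ROWS ONLY";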
* * @author Glenn Renfro + * @author Chris Bono */ public class H2PagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { - String topClause = new StringBuilder().append("LIMIT ").append(pageable.getOffset()).append(" ") - .append(pageable.getPageSize()).toString(); - return SqlPagingQueryUtils.generateTopJumpToQuery(this, topClause); + String limitClause = new StringBuilder().append("OFFSET ") + .append(pageable.getOffset()).append(" ROWS FETCH NEXT ") + .append(pageable.getPageSize()).append(" ROWS ONLY").toString(); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); } - } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java new file mode 100644 index 0000000000..5844c0ef56 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository.support; + +import org.springframework.data.domain.Pageable; + +/** + * MariaDB implementation of a {@link PagingQueryProvider} using database specific features. + * + * @author Glenn Renfro + * @author Corneil du Plessis + */ +public class MariaDBPagingQueryProvider extends AbstractSqlPagingQueryProvider { + @Override + public String getPageQuery(Pageable pageable) { + String topClause = "LIMIT " + pageable.getOffset() + ", " + + pageable.getPageSize(); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, topClause); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java new file mode 100644 index 0000000000..e5516585bd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java @@ -0,0 +1,15 @@ +package org.springframework.cloud.dataflow.server.repository.support; + +import org.springframework.util.StringUtils; + +public class SchemaUtilities { + private SchemaUtilities() { + } + + public static String getQuery(String query, String prefix, String defaultPrefix) { + return StringUtils.replace(query, "%PREFIX%", prefix != null ?
prefix : defaultPrefix); + } + public static String getQuery(String query, String prefix) { + return StringUtils.replace(query, "%PREFIX%", prefix); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java index 776e8e504a..4801836c73 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java @@ -16,12 +16,14 @@ package org.springframework.cloud.dataflow.server.repository.support; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import javax.sql.DataSource; import org.springframework.beans.factory.FactoryBean; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -47,16 +49,19 @@ public class SqlPagingQueryProviderFactoryBean implements FactoryBean sortKeys; - private Map providers = new HashMap(); - - { - providers.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); - providers.put(DatabaseType.H2, new H2PagingQueryProvider()); - providers.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); - providers.put(DatabaseType.POSTGRES, new PostgresPagingQueryProvider()); - providers.put(DatabaseType.ORACLE, new OraclePagingQueryProvider()); - providers.put(DatabaseType.SQLSERVER, new SqlServerPagingQueryProvider()); - providers.put(DatabaseType.DB2, new Db2PagingQueryProvider()); + private final static Map providers; + + static { + Map providerMap = new HashMap(); + providerMap.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); + providerMap.put(DatabaseType.H2, new H2PagingQueryProvider()); + providerMap.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); + providerMap.put(DatabaseType.MARIADB, new MariaDBPagingQueryProvider()); + providerMap.put(DatabaseType.POSTGRES, new PostgresPagingQueryProvider()); + providerMap.put(DatabaseType.ORACLE, new OraclePagingQueryProvider()); + providerMap.put(DatabaseType.SQLSERVER, new SqlServerPagingQueryProvider()); + providerMap.put(DatabaseType.DB2, new Db2PagingQueryProvider()); + providers = Collections.unmodifiableMap(providerMap); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java new file mode 100644 index 0000000000..be2be2b58e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java @@ -0,0 +1,40 @@ +package org.springframework.cloud.dataflow.server.service; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; 
+import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.util.StringUtils; + +public class JobExplorerContainer { + private final Map container = new HashMap<>(); + + public JobExplorerContainer(DataSource dataSource, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean(); + factoryBean.setDataSource(dataSource); + factoryBean.setTablePrefix(target.getBatchPrefix()); + try { + factoryBean.afterPropertiesSet(); + container.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JobExplorer for " + target.getName(), x); + } + } + } + + public JobExplorer get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java new file mode 100644 index 0000000000..e4c94f8401 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.service; + +import java.util.HashMap; +import java.util.Map; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.launch.support.SimpleJobLauncher; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.batch.AllInOneExecutionContextSerializer; +import org.springframework.cloud.dataflow.server.batch.JobService; +import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; +import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.core.env.Environment; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.util.StringUtils; + +/** + * The container provides implementations of JobService for each SchemaTarget. 
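// A usage sketch for the JobServiceContainer defined below; countJobExecutions()
// is assumed here as a representative JobService operation from the
// spring-batch-admin-derived interface used by this server.
JobService jobService = jobServiceContainer.get("boot3"); // null falls back to the default target
int totalJobExecutions = jobService.countJobExecutions();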
+ * + * @author Corneil du Plessis + */ +public class JobServiceContainer { + private final static Logger logger = LoggerFactory.getLogger(JobServiceContainer.class); + private final Map container = new HashMap<>(); + + public JobServiceContainer( + DataSource dataSource, + PlatformTransactionManager platformTransactionManager, + SchemaService schemaService, + JobRepositoryContainer jobRepositoryContainer, + JobExplorerContainer jobExplorerContainer, + Environment environment) { + + for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); + factoryBean.setEnvironment(environment); + factoryBean.setDataSource(dataSource); + factoryBean.setTransactionManager(platformTransactionManager); + factoryBean.setJobServiceContainer(this); + factoryBean.setJobLauncher(new SimpleJobLauncher()); + factoryBean.setJobExplorer(jobExplorerContainer.get(target.getName())); + factoryBean.setJobRepository(jobRepositoryContainer.get(target.getName())); + factoryBean.setTablePrefix(target.getBatchPrefix()); + factoryBean.setTaskTablePrefix(target.getTaskPrefix()); + factoryBean.setAppBootSchemaVersionTarget(target); + factoryBean.setSchemaService(schemaService); + factoryBean.setSerializer(new AllInOneExecutionContextSerializer()); + try { + factoryBean.afterPropertiesSet(); + container.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JobService for " + target.getName(), x); + } + } + } + public JobService get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + logger.info("get:default={}", schemaTarget); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java index 01a9cf24f5..734b290379 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java @@ -60,10 +60,10 @@ public void initialize(ApplicationReadyEvent event) { launcher.getType()); launcher.setOptions(options); this.launcherRepository.save(launcher); - logger.info(String.format( - "Added '%s' platform account '%s' into Task Launcher repository.", + logger.info( + "Added '{}' platform account '{}' into Task Launcher repository.", platform.getName(), - launcher.getName())); + launcher.getName()); }); }); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java index 6ddc22fe80..644e2a6f7e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 
the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.springframework.cloud.dataflow.server.service; import java.util.Optional; @@ -26,6 +27,7 @@ /** * * @author Gunnar Hillert + * @author Corneil du Plessis * */ public class SpringSecurityAuditorAware implements AuditorAware<String> { @@ -41,8 +43,8 @@ public Optional<String> getCurrentAuditor() { final boolean authenticationEnabled = securityStateBean.isAuthenticationEnabled(); if (authenticationEnabled && SecurityContextHolder.getContext() != null) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); - if (!(authentication instanceof AnonymousAuthenticationToken)) { - return Optional.of(authentication.getName()); + if (authentication != null && !(authentication instanceof AnonymousAuthenticationToken)) { + return Optional.ofNullable(authentication.getName()); } } return Optional.ofNullable(null); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java index d74167e2cc..5b1b999f0a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java @@ -96,11 +96,13 @@ public interface StreamService { * @param description description of the stream definition * @param deploy if {@code true}, the stream is deployed upon creation (default is * {@code false}) + * @param deploymentProperties the optional deployment properties to use when the stream is deployed upon creation * @return the created stream definition * @throws DuplicateStreamDefinitionException if a stream definition with the same name already exists * @throws InvalidStreamDefinitionException if there are errors in parsing the stream DSL, * resolving the name, or type of applications in the stream */ - StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy); + StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy, + Map<String, String> deploymentProperties); /** * Deploys the stream with the user provided deployment properties.
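// A usage sketch of the extended createStream signature above; the stream name,
// DSL and property key are illustrative.
Map<String, String> deploymentProperties = Collections.singletonMap("app.time.fixed-delay", "5");
StreamDefinition definition = streamService.createStream(
		"ticktock", "time | log", "demo stream", true, deploymentProperties);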
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java index 1a9f44fec6..85ad7bfc24 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java @@ -33,7 +33,7 @@ public interface TaskDeleteService { * * @param id the execution id */ - void cleanupExecution(long id); + void cleanupExecution(long id, String schemaTarget); /** * Cleanup the resources that resulted from running the task with the given execution @@ -42,14 +42,41 @@ public interface TaskDeleteService { * @param actionsAsSet the actions * @param ids the id's */ - void cleanupExecutions(Set actionsAsSet, Set ids); + void cleanupExecutions(Set actionsAsSet, Set ids, String schemaTarget); + + /** + * Clean up the resources that resulted from running the task with the given name. + * + * @param actionsAsSet the actions to perform + * @param taskName the task name + * @param onlyCompleted whether to include only completed tasks + */ + void cleanupExecutions(Set actionsAsSet, String taskName, boolean onlyCompleted); + + /** + * Clean up the resources that resulted from running the task with the given name. + * + * @param actionsAsSet the actions to perform + * @param taskName the task name + * @param onlyCompleted whether to include only completed tasks (ignored when {@code includeTasksEndedMinDaysAgo} is specified) + * @param includeTasksEndedMinDaysAgo only include tasks that have ended at least this many days ago + * @since 2.11.0 + */ + void cleanupExecutions(Set actionsAsSet, String taskName, boolean onlyCompleted, Integer includeTasksEndedMinDaysAgo); /** * Delete one or more Task executions. * * @param ids Collection of task execution ids to delete. Must contain at least 1 id. */ - void deleteTaskExecutions(Set ids); + void deleteTaskExecutions(Set ids, String schemaTarget); + + /** + * Delete task executions by name and execution state. + * @param taskName the name of the task executions + * @param onlyCompleted indicator to delete only completed tasks + */ + void deleteTaskExecutions(String taskName, boolean onlyCompleted); /** * Destroy the task definition. 
If it is a Composed Task then the task definitions diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java index 2fc649d463..e9fae6a26f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java @@ -30,5 +30,5 @@ public interface TaskExecutionCreationService { * @param taskName the name to be associated with the {@link TaskExecution} * @return {@link TaskExecution} */ - TaskExecution createTaskExecution(String taskName); + TaskExecution createTaskExecution(String taskName, String version); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java index bb598239a7..1392b869e2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.Map; +import java.util.Set; import org.springframework.cloud.dataflow.core.AllPlatformsTaskExecutionInformation; import org.springframework.cloud.dataflow.server.service.impl.TaskExecutionInformation; @@ -58,4 +59,7 @@ TaskExecutionInformation findTaskExecutionInformation(String taskName, * @since 2.3 */ List<AppDeploymentRequest> createTaskDeploymentRequests(String taskName, String dslText); + + Set<String> composedTaskChildNames(String taskName); + Set<String> taskNames(String taskName); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java index ace3d81f37..59c71e3fc4 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java @@ -19,6 +19,7 @@ import java.util.Map; import java.util.Set; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.TaskManifest; /** @@ -32,63 +33,90 @@ * @author Gunnar Hillert * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis */ public interface TaskExecutionService { /** * Execute a task with the provided task name and optional runtime properties. * - * @param taskName Name of the task. Must not be null or empty. + * @param taskName Name of the task. Must not be null or empty. * @param taskDeploymentProperties Optional deployment properties. Must not be null. - * @param commandLineArgs Optional runtime command line arguments + * @param commandLineArgs Optional runtime command line arguments * @return the taskExecutionId for the executed task.
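// A usage sketch of the LaunchResponse-returning executeTask declared below;
// LaunchResponse itself is not shown in this diff, so the two accessors are
// assumptions.
LaunchResponse response = taskExecutionService.executeTask(
		"myTask", Collections.emptyMap(), Collections.emptyList());
long executionId = response.getExecutionId();     // assumed accessor
String schemaTarget = response.getSchemaTarget(); // assumed accessor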
*/ - long executeTask(String taskName, Map<String, String> taskDeploymentProperties, List<String> commandLineArgs); + LaunchResponse executeTask(String taskName, Map<String, String> taskDeploymentProperties, List<String> commandLineArgs); /** * Retrieve logs for the task application. * * @param platformName the name of the platform - * @param taskId the ID that uniquely identifies the task + * @param taskId the ID that uniquely identifies the task * @return the logs of the task application. */ - String getLog(String platformName, String taskId); + String getLog(String platformName, String taskId, String schemaTarget); /** * Request the platform to stop the task executions for the ids provided. * * @param ids a set of ids for the task executions to be stopped. */ - void stopTaskExecution(Set<Long> ids); + void stopTaskExecution(Set<Long> ids, String schemaTarget); /** * Request the platform to stop the task executions for the ids provided. * - * @param ids a set of ids for the task executions to be stopped. - * @param platform The name of the platform where the tasks are executing. + * @param ids a set of ids for the task executions to be stopped. + * @param schemaTarget the schema target of the task execution. + * @param platform The name of the platform where the tasks are executing. */ - void stopTaskExecution(Set<Long> ids, String platform); + void stopTaskExecution(Set<Long> ids, String schemaTarget, String platform); /** * Retrieve the TaskManifest for the execution id provided - * @param id task execution id + * + * @param id task execution id + * @param schemaTarget the schema target of the task execution. * @return {@code TaskManifest} or null if not found. */ - TaskManifest findTaskManifestById(Long id); + TaskManifest findTaskManifestById(Long id, String schemaTarget); + + /** + * + * @param ids A set of task execution ids. + * @param schemaTarget Relevant schema target. + * @return collection of manifests mapped by the relevant task execution id. + */ + Map<Long, TaskManifest> findTaskManifestByIds(Set<Long> ids, String schemaTarget); /** * Returns all the task execution IDs with the option to include only the completed task executions. + * * @param onlyCompleted filter by completed task executions - * @param taskName the task name, if null then retrieve all the tasks + * @param taskName the task name, if null then retrieve all the tasks + * @return the set of execution ids. * @since 2.8 */ Set<Long> getAllTaskExecutionIds(boolean onlyCompleted, String taskName); /** * Returns the count of all the task execution IDs with the option to include only the completed task executions. - * @param onlyCompleted filter by completed task executions - * @param taskName the task name, if null then retrieve all the tasks + * + * @param onlyCompleted whether to include only completed task executions + * @param taskName the task name, if null then retrieve all the tasks + * @return the number of executions * @since 2.8 */ Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName); + + /** + * Returns the count of all the task execution IDs with the option to include only the completed task executions.
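// A usage sketch of the retention-aware count declared just below; per its
// javadoc, onlyCompleted is ignored when includeTasksEndedMinDaysAgo is supplied.
Integer endedOverMonthAgo = taskExecutionService.getAllTaskExecutionsCount(false, "myTask", 30);
Integer completed = taskExecutionService.getAllTaskExecutionsCount(true, "myTask", null);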
+ * + * @param onlyCompleted whether to include only completed task executions (ignored when {@code includeTasksEndedMinDaysAgo} is specified) + * @param taskName the task name, if null then retrieve all the tasks + * @param includeTasksEndedMinDaysAgo only include tasks that have ended at least this many days ago + * @return the number of executions, 0 if no data, never null + * @since 2.11.0 + */ + Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName, Integer includeTasksEndedMinDaysAgo); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java index f79d8b05c1..079745b245 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java @@ -16,8 +16,11 @@ package org.springframework.cloud.dataflow.server.service; +import java.util.Collection; import java.util.Date; import java.util.List; +import java.util.Map; +import java.util.Set; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Job; @@ -29,9 +32,11 @@ import org.springframework.batch.core.launch.NoSuchJobInstanceException; import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.dataflow.server.batch.JobExecutionWithStepCount; import org.springframework.cloud.dataflow.server.job.support.JobNotRestartableException; import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; /** @@ -40,6 +45,7 @@ * * @author Glenn Renfro. * @author Gunnar Hillert + * @author Corneil du Plessis */ public interface TaskJobService { @@ -50,90 +56,65 @@ public interface TaskJobService { * @param pageable enumerates the data to be returned. * @return List containing {@link TaskJobExecution}s. * @throws NoSuchJobExecutionException in the event that a job execution id specified is - * not present when looking up stepExecutions for the result. + * not present when looking up stepExecutions for the result. */ - List listJobExecutions(Pageable pageable) throws NoSuchJobExecutionException; + Page listJobExecutions(Pageable pageable) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository * with a specific jobName and matches the data with a task id. * * @param pageable enumerates the data to be returned. - * @param jobName the name of the job for which to findByTaskNameContains. + * @param jobName the name of the job for which to findByTaskNameContains. * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJobWithStepCount(Pageable pageable, String jobName) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCount(Pageable pageable, String jobName) throws NoSuchJobException; /** * Retrieves a JobExecution from the JobRepository and matches it with a task id. 
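With the TaskExecutionService changes above, launching a task no longer yields a bare execution id: executeTask returns a LaunchResponse, and every follow-up lookup (logs, manifests, stop requests) must also carry the schema target. A minimal caller-side sketch follows; the accessor names on LaunchResponse and the task and platform names are assumptions, not confirmed by this diff:

    import java.util.Collections;

    import org.springframework.cloud.dataflow.core.LaunchResponse;
    import org.springframework.cloud.dataflow.core.TaskManifest;
    import org.springframework.cloud.dataflow.server.service.TaskExecutionService;

    class LaunchSketch {
        static String launchAndFetchLog(TaskExecutionService service) {
            // Launch with no extra deployment properties or command line arguments;
            // "my-task" is a placeholder task definition name.
            LaunchResponse response = service.executeTask(
                    "my-task", Collections.emptyMap(), Collections.emptyList());
            // The schema target travels with the execution id through every follow-up call.
            TaskManifest manifest = service.findTaskManifestById(
                    response.getExecutionId(), response.getSchemaTarget());
            // Manifest lookup shown for illustration; the log fetch likewise needs the target.
            return service.getLog("default", String.valueOf(response.getExecutionId()),
                    response.getSchemaTarget());
        }
    }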
* - * @param id the id of the {@link JobExecution} + * @param id the id of the {@link JobExecution} + * @param schemaTarget the schema target of the task job execution. * @return the {@link TaskJobExecution}s associated with the id. * @throws NoSuchJobExecutionException if the specified job execution for the id does not - * exist. + * exist. */ - TaskJobExecution getJobExecution(long id) throws NoSuchJobExecutionException; + TaskJobExecution getJobExecution(long id, String schemaTarget) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobInstanceExecutions} from the JobRepository with a * specific jobName and matches the data with the associated JobExecutions. * * @param pageable enumerates the data to be returned. - * @param jobName the name of the job for which to findByTaskNameContains. + * @param jobName the name of the job for which to findByTaskNameContains. * @return List containing {@link JobInstanceExecutions}. * @throws NoSuchJobException if the job for the jobName specified does not exist. */ - List listTaskJobInstancesForJobName(Pageable pageable, String jobName) - throws NoSuchJobException; + Page listTaskJobInstancesForJobName(Pageable pageable, String jobName) throws NoSuchJobException; /** * Retrieves a {@link JobInstance} from the JobRepository and matches it with the * associated {@link JobExecution}s. * - * @param id the id of the {@link JobInstance} + * @param id the id of the {@link JobInstance} + * @param schemaTarget the schema target of the job instance. * @return the {@link JobInstanceExecutions} associated with the id. * @throws NoSuchJobInstanceException if job instance id does not exist. - * @throws NoSuchJobException if the job for the job instance does not exist. + * @throws NoSuchJobException if the job for the job instance does not exist. */ - JobInstanceExecutions getJobInstance(long id) throws NoSuchJobInstanceException, NoSuchJobException; - - /** - * Retrieves the total number of job instances for a job name. - * - * @param jobName the name of the job instance. - * @return the number of job instances associated with the jobName. - * @throws NoSuchJobException if the job for jobName specified does not exist. - */ - int countJobInstances(String jobName) throws NoSuchJobException; - - /** - * Retrieves the total number of the job executions. - * - * @return the total number of job executions. - */ - int countJobExecutions(); - - /** - * Retrieves the total number {@link JobExecution} that match a specific job name. - * - * @param jobName the job name to findByTaskNameContains. - * @param status the status of the job execution - * @return the number of {@link JobExecution}s that match the job name. - * @throws NoSuchJobException if the job for the jobName does not exist. - */ - int countJobExecutionsForJob(String jobName, BatchStatus status) throws NoSuchJobException; + JobInstanceExecutions getJobInstance(long id, String schemaTarget) throws NoSuchJobInstanceException, NoSuchJobException; /** * Restarts a {@link JobExecution} IF the respective {@link JobExecution} is actually * deemed restartable. Otherwise a {@link JobNotRestartableException} is being thrown. * * @param jobExecutionId The id of the JobExecution to restart. + * @param schemaTarget the schema target of the job execution. * @throws NoSuchJobExecutionException if the JobExecution for the provided id does not - * exist. + * exist. 
*/ - void restartJobExecution(long jobExecutionId) throws NoSuchJobExecutionException; + void restartJobExecution(long jobExecutionId, String schemaTarget) throws NoSuchJobExecutionException; /** * Requests a {@link JobExecution} to stop. @@ -143,14 +124,15 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * responsibility of the implementor of the {@link Job} to react to that request. * Furthermore, this method does not interfere with the associated {@link TaskExecution}. * - * @param jobExecutionId The id of the {@link JobExecution} to stop - * @throws NoSuchJobExecutionException thrown if no job execution exists for the - * jobExecutionId. + * @param jobExecutionId The id of the {@link JobExecution} to stop. + * @param schemaTarget the schema target of the job execution. + * @throws NoSuchJobExecutionException thrown if no job execution exists for the + * jobExecutionId. * @throws JobExecutionNotRunningException thrown if a stop is requested on a job that is - * not running. + * not running. * @see org.springframework.cloud.dataflow.server.batch.JobService#stop(Long) */ - void stopJobExecution(long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException; + void stopJobExecution(long jobExecutionId, String schemaTarget) throws NoSuchJobExecutionException, JobExecutionNotRunningException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount}s from the JobRepository @@ -158,24 +140,27 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * * @param pageable enumerates the data to be returned. * @return List containing {@link TaskJobExecution}s. - * * @throws NoSuchJobExecutionException thrown if the job execution specified does not - * exist. + * exist. */ - List listJobExecutionsWithStepCount(Pageable pageable) throws NoSuchJobExecutionException; + Page listJobExecutionsWithStepCount(Pageable pageable) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecution} from the JobRepository with a specific * jobName, status and matches the data with a task id. * * @param pageable enumerates the data to be returned. - * @param jobName the name of the job for which to findByTaskNameContains. - * @param status the BatchStatus of the job execution. + * @param jobName the name of the job for which to findByTaskNameContains. + * @param status the BatchStatus of the job execution. * @return List containing {@link TaskJobExecution}s. - * @throws NoSuchJobException if the job with the given name does not exist. + * @throws NoSuchJobException if the job with the given name does not exist. + * @throws NoSuchJobExecutionException thrown if the job execution with the given name does not exist. */ - List listJobExecutionsForJob(Pageable pageable, String jobName, BatchStatus status) - throws NoSuchJobException; + Page listJobExecutionsForJob( + Pageable pageable, + String jobName, + BatchStatus status + ) throws NoSuchJobException, NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository @@ -183,34 +168,46 @@ List listJobExecutionsForJob(Pageable pageable, String jobName * * @param pageable enumerates the data to be returned. * @param fromDate the date which start date must be greater than. - * @param toDate the date which start date must be less than. + * @param toDate the date which start date must be less than. * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist.
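The switch from List to Page in these TaskJobService signatures is also what makes the removal of countJobInstances, countJobExecutions, and countJobExecutionsForJob above safe: a Page carries the overall totals alongside the current slice. A sketch of a caller reading the total, assuming standard Spring Data paging semantics:

    import org.springframework.batch.core.launch.NoSuchJobExecutionException;
    import org.springframework.cloud.dataflow.rest.job.TaskJobExecution;
    import org.springframework.cloud.dataflow.server.service.TaskJobService;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.PageRequest;

    class JobExecutionTotals {
        static long totalJobExecutions(TaskJobService service) throws NoSuchJobExecutionException {
            // Ask for one page of 20 results; getTotalElements() supplies the
            // overall count that previously required a dedicated count method.
            Page<TaskJobExecution> page = service.listJobExecutions(PageRequest.of(0, 20));
            return page.getTotalElements();
        }
    }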
*/ - List listJobExecutionsForJobWithStepCount(Pageable pageable, Date fromDate, Date toDate) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCount(Pageable pageable, Date fromDate, Date toDate) throws NoSuchJobException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository * filtered by the job instance id. * - * @param pageable enumerates the data to be returned. + * @param pageable enumerates the data to be returned. * @param jobInstanceId the job instance id associated with the execution. + * @param schemaTarget the schema target of the job instance. * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(Pageable pageable, int jobInstanceId) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCountFilteredByJobInstanceId( + Pageable pageable, + int jobInstanceId, + String schemaTarget + ) throws NoSuchJobException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository * filtered by the task execution id. * - * @param pageable enumerates the data to be returned. + * @param pageable enumerates the data to be returned. * @param taskExecutionId the task execution id associated with the execution. + * @param schemaTarget the schema target of the task execution. * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(Pageable pageable, int taskExecutionId) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId( + Pageable pageable, + int taskExecutionId, + String schemaTarget + ) throws NoSuchJobException; + + Map> getJobExecutionIdsByTaskExecutionIds(Collection taskExecutionIds, String schemaTarget); + + void populateComposeTaskRunnerStatus(Collection taskExecutions); + } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java index 4fa6899ede..a56d234ec0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java @@ -35,19 +35,24 @@ import org.springframework.cloud.dataflow.core.StreamDefinitionService; import org.springframework.cloud.dataflow.core.StreamDefinitionServiceUtils; import org.springframework.cloud.dataflow.core.StreamPropertyKeys; +import org.springframework.cloud.dataflow.core.TaskPlatformFactory; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.controller.VisibleProperties; import org.springframework.cloud.deployer.spi.app.AppDeployer; import org.springframework.cloud.deployer.spi.core.AppDefinition; import 
org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.Resource; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Create the list of {@link AppDeploymentRequest}s from a {@link StreamDefinition} and * deployment properties map. + * * @author Eric Bottard * @author Mark Fisher * @author Patrick Peralta @@ -69,10 +74,16 @@ public class AppDeploymentRequestCreator { private final StreamDefinitionService streamDefinitionService; - public AppDeploymentRequestCreator(AppRegistryService appRegistry, - CommonApplicationProperties commonApplicationProperties, - ApplicationConfigurationMetadataResolver metadataResolver, - StreamDefinitionService streamDefinitionService) { + private final PropertyResolver propertyResolver; + + public AppDeploymentRequestCreator( + AppRegistryService appRegistry, + CommonApplicationProperties commonApplicationProperties, + ApplicationConfigurationMetadataResolver metadataResolver, + StreamDefinitionService streamDefinitionService, + PropertyResolver propertyResolver + ) { + Assert.notNull(propertyResolver, "propertyResolver must not be null"); Assert.notNull(appRegistry, "AppRegistryService must not be null"); Assert.notNull(commonApplicationProperties, "CommonApplicationProperties must not be null"); Assert.notNull(metadataResolver, "MetadataResolver must not be null"); @@ -81,22 +92,26 @@ public AppDeploymentRequestCreator(AppRegistryService appRegistry, this.commonApplicationProperties = commonApplicationProperties; this.visibleProperties = new VisibleProperties(metadataResolver); this.streamDefinitionService = streamDefinitionService; + this.propertyResolver = propertyResolver; } - public List createUpdateRequests(StreamDefinition streamDefinition, - Map updateProperties) { + public List createUpdateRequests( + StreamDefinition streamDefinition, + Map updateProperties + ) { List appDeploymentRequests = new ArrayList<>(); if (updateProperties == null) { updateProperties = Collections.emptyMap(); } - Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions(streamDefinition)); + Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions( + streamDefinition)); while (iterator.hasNext()) { StreamAppDefinition currentApp = iterator.next(); ApplicationType type = currentApp.getApplicationType(); AppRegistration appRegistration = this.appRegistry.find(currentApp.getRegisteredAppName(), type); Assert.notNull(appRegistration, - String.format("no application '%s' of type '%s' exists in the registry", - currentApp.getName(), type)); + String.format("no application '%s' of type '%s' exists in the registry", + currentApp.getName(), type)); String version = extractAppVersionProperty(currentApp, updateProperties); List commandlineArguments = new ArrayList<>(); @@ -105,26 +120,78 @@ public List createUpdateRequests(StreamDefinition streamDe } Map appUpdateTimeProperties = extractAppProperties(currentApp, updateProperties); Map deployerDeploymentProperties = DeploymentPropertiesUtils - .extractAndQualifyDeployerProperties(updateProperties, currentApp.getName()); + .extractAndQualifyDeployerProperties(updateProperties, currentApp.getName()); Resource appResource = appRegistry.getAppResource(appRegistration); Resource metadataResource = appRegistry.getAppMetadataResource(appRegistration); Map 
expandedAppUpdateTimeProperties = (appUpdateTimeProperties.isEmpty()) ? new HashMap<>() : - this.visibleProperties.qualifyProperties(appUpdateTimeProperties, metadataResource); + this.visibleProperties.qualifyProperties(appUpdateTimeProperties, metadataResource); expandedAppUpdateTimeProperties.put(DataFlowPropertyKeys.STREAM_APP_TYPE, type.toString()); - AppDefinition appDefinition = new AppDefinition(currentApp.getName(), expandedAppUpdateTimeProperties); + addBootVersion(currentApp.getName(), appRegistration.getBootVersion(), deployerDeploymentProperties); - AppDeploymentRequest request = new AppDeploymentRequest(appDefinition, appResource, - deployerDeploymentProperties, commandlineArguments); + AppDefinition appDefinition = new AppDefinition(currentApp.getName(), expandedAppUpdateTimeProperties); + AppDeploymentRequest request = new AppDeploymentRequest(appDefinition, appResource, + deployerDeploymentProperties, commandlineArguments); + logger.debug("createUpdateRequests:request:{}", request); appDeploymentRequests.add(request); } return appDeploymentRequests; } + private void addBootVersion( + String name, + AppBootSchemaVersion bootVersion, + Map deployerDeploymentProperties + ) { + deployerDeploymentProperties.put("spring.cloud.deployer.bootVersion", bootVersion.getBootVersion()); + } + + private void addDefaultDeployerProperties( + String appName, + String platformType, + String bootVersion, + Map deploymentProperties + ) { + switch (platformType) { + case "local": { + String javaHome = propertyResolver.getProperty("spring.cloud.dataflow.defaults.boot" + bootVersion + ".local.javaHomePath"); + if (StringUtils.hasText(javaHome)) { + String property = "spring.cloud.deployer.local.javaHomePath." + bootVersion; + deploymentProperties.put(property, javaHome); + logger.debug("added:{}={}", property, javaHome); + } + break; + } + case "cloudfoundry": { + String buildpack = propertyResolver.getProperty("spring.cloud.dataflow.defaults.boot" + bootVersion + ".cloudfoundry.buildpack"); + logger.debug("Resolved defaults buildpack: " + buildpack); + if (StringUtils.hasText(buildpack)) { + deploymentProperties.put("spring.cloud.deployer.cloudfoundry.buildpack", buildpack); + logger.debug("added:spring.cloud.deployer.cloudfoundry.buildpack={}", buildpack); + } + + String buildpacks = propertyResolver.getProperty("spring.cloud.dataflow.defaults.boot" + bootVersion + ".cloudfoundry.buildpacks"); + logger.debug("Resolved defaults buildpacks: " + buildpacks); + if (StringUtils.hasText(buildpacks)) { + deploymentProperties.put("spring.cloud.deployer.cloudfoundry.buildpacks", buildpacks); + logger.debug("added:spring.cloud.deployer.cloudfoundry.buildpacks={}", buildpacks); + } + logger.debug("Using Boot Version: " + bootVersion); + if(AppBootSchemaVersion.BOOT3.getBootVersion().equals(bootVersion)) { + deploymentProperties.put("spring.cloud.deployer.cloudfoundry.env.JBP_CONFIG_OPEN_JDK_JRE", "{jre: {version: 17.+}}"); + } + break; + } + } + } + private String extractAppVersionProperty(StreamAppDefinition appDefinition, Map updateProperties) { String versionPrefix = String.format("version.%s", appDefinition.getName()); + if (updateProperties.containsKey(versionPrefix)) { + return updateProperties.get(versionPrefix); + } for (Map.Entry entry : updateProperties.entrySet()) { if (entry.getKey().startsWith(versionPrefix)) { return entry.getValue(); @@ -136,45 +203,52 @@ private String extractAppVersionProperty(StreamAppDefinition appDefinition, Map< /** * Create a list of {@link AppDeploymentRequest}s from 
the provided * {@link StreamDefinition} and map of deployment properties. - * @param streamDefinition the stream definition + * + * @param streamDefinition the stream definition * @param streamDeploymentProperties the stream's deployment properties + * @param platformType the platform types to include * @return list of AppDeploymentRequests */ - public List createRequests(StreamDefinition streamDefinition, - Map streamDeploymentProperties, String platformType) { + public List createRequests( + StreamDefinition streamDefinition, + Map streamDeploymentProperties, String platformType + ) { List appDeploymentRequests = new ArrayList<>(); if (streamDeploymentProperties == null) { streamDeploymentProperties = Collections.emptyMap(); } - Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions(streamDefinition)); + Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions( + streamDefinition)); int nextAppCount = 0; boolean isDownStreamAppPartitioned = false; while (iterator.hasNext()) { StreamAppDefinition currentApp = iterator.next(); AppRegistration appRegistration = this.appRegistry.find(currentApp.getRegisteredAppName(), currentApp.getApplicationType()); Assert.notNull(appRegistration, String.format("no application '%s' of type '%s' exists in the registry", - currentApp.getName(), currentApp.getApplicationType())); + currentApp.getName(), currentApp.getApplicationType())); Map appDeployTimeProperties = extractAppProperties(currentApp, streamDeploymentProperties); Map deployerDeploymentProperties = DeploymentPropertiesUtils - .extractAndQualifyDeployerProperties(streamDeploymentProperties, currentApp.getName()); + .extractAndQualifyDeployerProperties(streamDeploymentProperties, currentApp.getName()); deployerDeploymentProperties.put(AppDeployer.GROUP_PROPERTY_KEY, currentApp.getStreamName()); + String version = extractAppVersionProperty(currentApp, streamDeploymentProperties); List commandlineArguments = new ArrayList<>(); if (version != null) { // TODO ensure new version as a resource exists and load that AppRegistration commandlineArguments.add(version); } - + addDefaultDeployerProperties(currentApp.getName(), platformType, appRegistration.getBootVersion().getBootVersion(), deployerDeploymentProperties); + addBootVersion(currentApp.getName(), appRegistration.getBootVersion(), deployerDeploymentProperties); // Set instance count property if (deployerDeploymentProperties.containsKey(AppDeployer.COUNT_PROPERTY_KEY)) { appDeployTimeProperties.put(StreamPropertyKeys.INSTANCE_COUNT, - deployerDeploymentProperties.get(AppDeployer.COUNT_PROPERTY_KEY)); + deployerDeploymentProperties.get(AppDeployer.COUNT_PROPERTY_KEY)); } boolean upstreamAppSupportsPartition = upstreamAppHasPartitionInfo(streamDefinition, currentApp, - streamDeploymentProperties); + streamDeploymentProperties); if (currentApp.getApplicationType() != ApplicationType.app) { if (upstreamAppSupportsPartition) { @@ -194,8 +268,8 @@ public List createRequests(StreamDefinition streamDefiniti isDownStreamAppPartitioned = isPartitionedConsumer(appDeployTimeProperties, upstreamAppSupportsPartition); } - logger.info(String.format("Creating resource with [%s] for application [%s]", - appRegistration.getUri().toString(), currentApp.getName())); + logger.info("Creating resource with [{}] for application [{}]", + appRegistration.getUri().toString(), currentApp.getName()); Resource appResource = 
this.appRegistry.getAppResource(appRegistration); Resource metadataResource = this.appRegistry.getAppMetadataResource(appRegistration); @@ -214,13 +288,12 @@ public List createRequests(StreamDefinition streamDefiniti // Merge *definition time* app properties with *deployment time* properties // and expand them to their long form if applicable AppDefinition revisedDefinition = mergeAndExpandAppProperties(currentApp, metadataResource, - appDeployTimeProperties); + appDeployTimeProperties); AppDeploymentRequest request = new AppDeploymentRequest(revisedDefinition, appResource, - deployerDeploymentProperties, commandlineArguments); + deployerDeploymentProperties, commandlineArguments); - logger.debug("Created AppDeploymentRequest = " + request.toString() + " AppDefinition = " - + request.getDefinition().toString()); + logger.debug("Created AppDeploymentRequest = {}, AppDefinition = {}", request, request.getDefinition()); appDeploymentRequests.add(request); } return appDeploymentRequests; @@ -229,39 +302,38 @@ public List createRequests(StreamDefinition streamDefiniti private void contributeCommonApplicationProperties(String platformType, Map appDeployTimeProperties) { String platformTypePrefix = platformType + "."; this.commonApplicationProperties.getStreamResourceProperties() - .ifPresent(defaults -> defaults.entrySet().stream() - .filter(e -> e.getValue() != null) - .filter(e -> e.getKey().toString().startsWith(platformTypePrefix)) - .forEach(e -> appDeployTimeProperties.putIfAbsent( - e.getKey().toString().replaceFirst(platformTypePrefix, ""), e.getValue().toString()))); + .ifPresent(defaults -> defaults.entrySet().stream() + .filter(e -> e.getValue() != null) + .filter(e -> e.getKey().toString().startsWith(platformTypePrefix)) + .forEach(e -> appDeployTimeProperties.putIfAbsent( + e.getKey().toString().replaceFirst(platformTypePrefix, ""), e.getValue().toString()))); } /** * Extract and return a map of properties for a specific app within the deployment * properties of a stream. 
* - * @param appDefinition the {@link StreamAppDefinition} for which to return a map of - * properties + * @param appDefinition the {@link StreamAppDefinition} for which to return a map of + * properties * @param streamDeploymentProperties deployment properties for the stream that the app is - * defined in + * defined in * @return map of properties for an app */ - /* default */ Map extractAppProperties(StreamAppDefinition appDefinition, - Map streamDeploymentProperties) { - Map appDeploymentProperties = new HashMap<>(); - appDeploymentProperties.putAll(this.commonApplicationProperties.getStream()); - // add properties with wild card prefix - String wildCardProducerPropertyPrefix = "app.*.producer."; - String wildCardConsumerPropertyPrefix = "app.*.consumer."; + /* default */ Map extractAppProperties( + StreamAppDefinition appDefinition, + Map streamDeploymentProperties + ) { + Map appDeploymentProperties = new HashMap<>(this.commonApplicationProperties.getStream()); String wildCardPrefix = "app.*."; - parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, wildCardProducerPropertyPrefix, - wildCardConsumerPropertyPrefix, wildCardPrefix); + parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, wildCardPrefix); // add application specific properties - String producerPropertyPrefix = String.format("app.%s.producer.", appDefinition.getName()); - String consumerPropertyPrefix = String.format("app.%s.consumer.", appDefinition.getName()); - String appPrefix = String.format("app.%s.", appDefinition.getName()); - parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, producerPropertyPrefix, - consumerPropertyPrefix, appPrefix); + List names = new ArrayList<>(); + names.add(String.format("app.%s.", appDefinition.getName())); + if (!appDefinition.getName().equals(appDefinition.getRegisteredAppName())) { + names.add(appDefinition.getRegisteredAppName()); + } + parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, names.toArray(new String[0])); + logger.debug("extractAppProperties:{}", appDeploymentProperties); return appDeploymentProperties; } @@ -269,45 +341,48 @@ private void contributeCommonApplicationProperties(String platformType, Map streamDeploymentProperties) { - Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions(streamDefinition)); + /* default */ boolean upstreamAppHasPartitionInfo( + StreamDefinition streamDefinition, StreamAppDefinition currentApp, + Map streamDeploymentProperties + ) { + Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions( + streamDefinition)); while (iterator.hasNext()) { StreamAppDefinition app = iterator.next(); if (app.equals(currentApp) && iterator.hasNext()) { StreamAppDefinition prevApp = iterator.next(); Map appDeploymentProperties = extractAppProperties(prevApp, streamDeploymentProperties); return appDeploymentProperties.containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXPRESSION) - || appDeploymentProperties - .containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXTRACTOR_CLASS); + || appDeploymentProperties + .containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXTRACTOR_CLASS); } } return false; } - /* default */ void parseAndPopulateProperties(Map streamDeploymentProperties, - Map appDeploymentProperties, String producerPropertyPrefix, - String consumerPropertyPrefix, - String appPrefix) { + /* default */ void 
parseAndPopulateProperties( + Map streamDeploymentProperties, + Map appDeploymentProperties, + String... prefixes + ) { for (Map.Entry entry : streamDeploymentProperties.entrySet()) { - if (entry.getKey().startsWith(appPrefix)) { - if (entry.getKey().startsWith(producerPropertyPrefix)) { - appDeploymentProperties.put(BindingPropertyKeys.OUTPUT_BINDING_KEY_PREFIX - + entry.getKey().substring(appPrefix.length()), entry.getValue()); - } - else if (entry.getKey().startsWith(consumerPropertyPrefix)) { - appDeploymentProperties.put( - BindingPropertyKeys.INPUT_BINDING_KEY_PREFIX + entry.getKey().substring(appPrefix.length()), - entry.getValue()); - } - else { - appDeploymentProperties.put(entry.getKey().substring(appPrefix.length()), entry.getValue()); + for (String prefix : prefixes) { + String key = entry.getKey(); + if (key.startsWith(prefix)) { + String value = entry.getValue(); + if (key.startsWith(prefix + "producer")) { + appDeploymentProperties.put(BindingPropertyKeys.OUTPUT_BINDING_KEY_PREFIX + key.substring(prefix.length()), value); + } else if (key.startsWith(prefix + "consumer")) { + appDeploymentProperties.put(BindingPropertyKeys.INPUT_BINDING_KEY_PREFIX + key.substring(prefix.length()), value); + } else { + appDeploymentProperties.put(key.substring(prefix.length()), value); + } } } } @@ -318,8 +393,11 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * merged and short form parameters have been expanded to their long form (amongst the * included supported properties of the app) if applicable. */ - /* default */ AppDefinition mergeAndExpandAppProperties(StreamAppDefinition original, Resource metadataResource, - Map appDeployTimeProperties) { + /* default */ + AppDefinition mergeAndExpandAppProperties( + StreamAppDefinition original, Resource metadataResource, + Map appDeployTimeProperties + ) { Map merged = new HashMap<>(original.getProperties()); merged.putAll(appDeployTimeProperties); merged = this.visibleProperties.qualifyProperties(merged, metadataResource); @@ -330,11 +408,12 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { /** * Add app properties for producing partitioned data to the provided properties. 
* - * @param properties properties to update + * @param properties properties to update * @param nextInstanceCount the number of instances for the next (downstream) app in the - * stream + * stream */ - /* default */ void updateProducerPartitionProperties(Map properties, int nextInstanceCount) { + /* default */ + void updateProducerPartitionProperties(Map properties, int nextInstanceCount) { properties.put(BindingPropertyKeys.OUTPUT_PARTITION_COUNT, String.valueOf(nextInstanceCount)); if (!properties.containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXPRESSION)) { properties.put(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXPRESSION, DEFAULT_PARTITION_KEY_EXPRESSION); @@ -346,7 +425,8 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * * @param properties properties to update */ - /* default */ void updateConsumerPartitionProperties(Map properties) { + /* default */ + void updateConsumerPartitionProperties(Map properties) { properties.put(BindingPropertyKeys.INPUT_PARTITIONED, "true"); } @@ -357,7 +437,8 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * @return instance count indicated in the provided properties; if the properties do not * contain a count, a value of {@code 1} is returned */ - /* default */ int getInstanceCount(Map properties) { + /* default */ + int getInstanceCount(Map properties) { return Integer.parseInt(properties.getOrDefault(AppDeployer.COUNT_PROPERTY_KEY, "1")); } @@ -366,16 +447,19 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * either by the deployment properties for the app or whether the previous (upstream) app * is publishing partitioned data. * - * @param appDeploymentProperties deployment properties for the app + * @param appDeploymentProperties deployment properties for the app * @param upstreamAppSupportsPartition if true, previous (upstream) app in the stream - * publishes partitioned data + * publishes partitioned data * @return true if the app consumes partitioned data */ - /* default */ boolean isPartitionedConsumer(Map appDeploymentProperties, - boolean upstreamAppSupportsPartition) { + /* default */ + boolean isPartitionedConsumer( + Map appDeploymentProperties, + boolean upstreamAppSupportsPartition + ) { return upstreamAppSupportsPartition - || (appDeploymentProperties.containsKey(BindingPropertyKeys.INPUT_PARTITIONED) - && appDeploymentProperties.get(BindingPropertyKeys.INPUT_PARTITIONED).equalsIgnoreCase("true")); + || (appDeploymentProperties.containsKey(BindingPropertyKeys.INPUT_PARTITIONED) + && appDeploymentProperties.get(BindingPropertyKeys.INPUT_PARTITIONED).equalsIgnoreCase("true")); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java index 9e6e4afef7..0d8a95cd8f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
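The reworked extractAppProperties and parseAndPopulateProperties above collapse the separate producer/consumer/app prefix arguments into a single varargs list of prefixes, with the producer and consumer qualification now derived from the key itself. A self-contained sketch of that mapping rule, with local constants standing in for the BindingPropertyKeys values (the constant values here are assumptions):

    import java.util.HashMap;
    import java.util.Map;

    class PrefixMappingSketch {
        // Assumed values; in SCDF these come from BindingPropertyKeys.
        static final String OUTPUT = "spring.cloud.stream.bindings.output.";
        static final String INPUT = "spring.cloud.stream.bindings.input.";

        // Mirrors the rule in parseAndPopulateProperties(..): for each prefix,
        // "<prefix>producer.*" maps to output binding keys, "<prefix>consumer.*"
        // to input binding keys, and anything else is copied with the prefix stripped.
        static Map<String, String> extract(Map<String, String> streamProps, String... prefixes) {
            Map<String, String> appProps = new HashMap<>();
            for (Map.Entry<String, String> e : streamProps.entrySet()) {
                for (String prefix : prefixes) {
                    String key = e.getKey();
                    if (!key.startsWith(prefix)) {
                        continue;
                    }
                    if (key.startsWith(prefix + "producer")) {
                        appProps.put(OUTPUT + key.substring(prefix.length()), e.getValue());
                    } else if (key.startsWith(prefix + "consumer")) {
                        appProps.put(INPUT + key.substring(prefix.length()), e.getValue());
                    } else {
                        appProps.put(key.substring(prefix.length()), e.getValue());
                    }
                }
            }
            return appProps;
        }
    }

Because the wildcard prefix app.*. is processed before the app-specific prefixes, later puts overwrite earlier ones, which is how app-specific settings win over wildcard defaults.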
@@ -19,15 +19,22 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.TreeMap; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.audit.service.AuditServiceUtils; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; @@ -43,16 +50,19 @@ import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.controller.VisibleProperties; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.SchedulerServiceProperties; +import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.scheduler.ScheduleInfo; import org.springframework.cloud.deployer.spi.scheduler.ScheduleRequest; import org.springframework.cloud.task.listener.TaskException; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.data.domain.Page; @@ -70,81 +80,87 @@ */ public class DefaultSchedulerService implements SchedulerService { + private static final Logger logger = LoggerFactory.getLogger(DefaultSchedulerService.class); + private final static int MAX_SCHEDULE_NAME_LEN = 52; private CommonApplicationProperties commonApplicationProperties; + private List taskPlatforms; + private TaskDefinitionRepository taskDefinitionRepository; + private AppRegistryService registry; + private final TaskConfigurationProperties taskConfigurationProperties; + private final String dataflowServerUri; + private final VisibleProperties visibleProperties; + private final SchedulerServiceProperties schedulerServiceProperties; + private final AuditRecordService auditRecordService; + private final AuditServiceUtils auditServiceUtils; + private final DataSourceProperties dataSourceProperties; + private final ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + private final AggregateExecutionSupport aggregateExecutionSupport; + + private final TaskDefinitionReader taskDefinitionReader; + + private final TaskExecutionInfoService taskExecutionInfoService; + + private final PropertyResolver propertyResolver; + private static final Pattern TASK_NAME_PATTERN = 
Pattern.compile("[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?"); + private static final String TASK_NAME_VALIDATION_MSG = "Task name must consist of alphanumeric characters " + "or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', " + "or 'abc-123')"; /** * Constructor for DefaultSchedulerService - * @param commonApplicationProperties common properties for applications deployed via Spring Cloud Data Flow. - * @param taskPlatforms the {@link TaskPlatform}s for this service. - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} for this service. - * @param registry the {@link AppRegistryService} for this service. - * @param resourceLoader the {@link ResourceLoader} for this service. - * @param taskConfigurationProperties the {@link TaskConfigurationProperties} for this service. - * @param dataSourceProperties the {@link DataSourceProperties} for this service. - * @param dataflowServerUri the Spring Cloud Data Flow uri for this service. - * @param metaDataResolver the {@link ApplicationConfigurationMetadataResolver} for this service. - * @param schedulerServiceProperties the {@link SchedulerServiceProperties} for this service. - * @param auditRecordService the {@link AuditRecordService} for this service. - */ - @Deprecated - public DefaultSchedulerService(CommonApplicationProperties commonApplicationProperties, - List taskPlatforms, TaskDefinitionRepository taskDefinitionRepository, - AppRegistryService registry, ResourceLoader resourceLoader, - TaskConfigurationProperties taskConfigurationProperties, - DataSourceProperties dataSourceProperties, String dataflowServerUri, - ApplicationConfigurationMetadataResolver metaDataResolver, - SchedulerServiceProperties schedulerServiceProperties, - AuditRecordService auditRecordService) { - - this(commonApplicationProperties, taskPlatforms, taskDefinitionRepository, registry, resourceLoader, - taskConfigurationProperties, dataSourceProperties, dataflowServerUri, metaDataResolver, - schedulerServiceProperties, auditRecordService, null); - } - - /** - * Constructor for DefaultSchedulerService - * @param commonApplicationProperties common properties for applications deployed via Spring Cloud Data Flow. - * @param taskPlatforms the {@link TaskPlatform}s for this service. - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} for this service. - * @param registry the {@link AppRegistryService} for this service. - * @param resourceLoader the {@link ResourceLoader} for this service. - * @param taskConfigurationProperties the {@link TaskConfigurationProperties} for this service. - * @param dataSourceProperties the {@link DataSourceProperties} for this service. - * @param dataflowServerUri the Spring Cloud Data Flow uri for this service. - * @param metaDataResolver the {@link ApplicationConfigurationMetadataResolver} for this service. - * @param schedulerServiceProperties the {@link SchedulerServiceProperties} for this service. - * @param auditRecordService the {@link AuditRecordService} for this service. - * @param composedTaskRunnerConfigurationProperties the {@link ComposedTaskRunnerConfigurationProperties} for this - * service + * + * @param commonApplicationProperties common properties for applications deployed via Spring Cloud Data Flow. + * @param taskPlatforms the {@link TaskPlatform}s for this service. + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} for this service. + * @param registry the {@link AppRegistryService} for this service. 
+ * @param resourceLoader the {@link ResourceLoader} for this service. + * @param taskConfigurationProperties the {@link TaskConfigurationProperties} for this service. + * @param dataSourceProperties the {@link DataSourceProperties} for this service. + * @param dataflowServerUri the Spring Cloud Data Flow uri for this service. + * @param metaDataResolver the {@link ApplicationConfigurationMetadataResolver} for this service. + * @param schedulerServiceProperties the {@link SchedulerServiceProperties} for this service. + * @param auditRecordService the {@link AuditRecordService} for this service. + * @param aggregateExecutionSupport the {@link AggregateExecutionSupport} for this service + * @param taskDefinitionReader the {@link TaskDefinitionReader} for this service + * @param taskExecutionInfoService the {@link TaskExecutionInfoService} for this service + * @param propertyResolver the {@link PropertyResolver} for this service + * @param composedTaskRunnerConfigurationProperties the {@link ComposedTaskRunnerConfigurationProperties} for this service */ - public DefaultSchedulerService(CommonApplicationProperties commonApplicationProperties, - List taskPlatforms, TaskDefinitionRepository taskDefinitionRepository, - AppRegistryService registry, ResourceLoader resourceLoader, + public DefaultSchedulerService( + CommonApplicationProperties commonApplicationProperties, + List taskPlatforms, + TaskDefinitionRepository taskDefinitionRepository, + AppRegistryService registry, + ResourceLoader resourceLoader, TaskConfigurationProperties taskConfigurationProperties, - DataSourceProperties dataSourceProperties, String dataflowServerUri, + DataSourceProperties dataSourceProperties, + String dataflowServerUri, ApplicationConfigurationMetadataResolver metaDataResolver, SchedulerServiceProperties schedulerServiceProperties, AuditRecordService auditRecordService, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionInfoService taskExecutionInfoService, + PropertyResolver propertyResolver, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { Assert.notNull(commonApplicationProperties, "commonApplicationProperties must not be null"); Assert.notNull(taskPlatforms, "taskPlatforms must not be null"); Assert.notNull(registry, "AppRegistryService must not be null"); @@ -155,6 +171,10 @@ public DefaultSchedulerService(CommonApplicationProperties commonApplicationProp Assert.notNull(schedulerServiceProperties, "schedulerServiceProperties must not be null"); Assert.notNull(auditRecordService, "AuditRecordService must not be null"); Assert.notNull(dataSourceProperties, "dataSourceProperties must not be null"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport must not be null"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader must not be null"); + Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); + Assert.notNull(propertyResolver, "propertyResolver must not be null"); this.commonApplicationProperties = commonApplicationProperties; this.taskPlatforms = taskPlatforms; this.taskDefinitionRepository = taskDefinitionRepository; @@ -166,18 +186,27 @@ public DefaultSchedulerService(CommonApplicationProperties commonApplicationProp this.auditRecordService = auditRecordService; this.auditServiceUtils = new AuditServiceUtils(); this.dataSourceProperties = 
dataSourceProperties; + this.aggregateExecutionSupport = aggregateExecutionSupport; + this.taskDefinitionReader = taskDefinitionReader; + this.taskExecutionInfoService = taskExecutionInfoService; + this.propertyResolver = propertyResolver; this.composedTaskRunnerConfigurationProperties = composedTaskRunnerConfigurationProperties; } @Override - public void schedule(String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, - List commandLineArgs) { + public void schedule( + String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, + List commandLineArgs + ) { schedule(scheduleName, taskDefinitionName, taskDeploymentProperties, commandLineArgs, null); } + @SuppressWarnings("DuplicatedCode") @Override - public void schedule(String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, - List commandLineArgs, String platformName) { + public void schedule( + String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, + List commandLineArgs, String platformName + ) { String platformType = StreamSupport.stream(getLaunchers().spliterator(), true) .filter(deployer -> deployer.getName().equalsIgnoreCase(platformName)) .map(Launcher::getType) @@ -188,33 +217,80 @@ public void schedule(String scheduleName, String taskDefinitionName, Map new NoSuchTaskDefinitionException(taskDefinitionName)); + TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(taskDefinitionName).orElse(null); + if (taskDefinition == null) { + throw new NoSuchTaskDefinitionException(taskDefinitionName); + } + + String taskAppName = taskDefinition.getRegisteredAppName(); + String taskLabel = taskDefinition.getAppDefinition().getName(); + String version = taskDeploymentProperties.get("version." + taskLabel); + if (version == null) { + version = taskDeploymentProperties.get("version." + taskAppName); + } + + + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskAppName, version, taskDefinition); + Assert.notNull(schemaVersionTarget, "schemaVersionTarget not found for " + taskAppName); TaskParser taskParser = new TaskParser(taskDefinition.getName(), taskDefinition.getDslText(), true, true); TaskNode taskNode = taskParser.parse(); AppRegistration appRegistration; // if composed task definition replace definition with one composed task // runner and executable graph. 
if (taskNode.isComposed()) { - taskDefinition = new TaskDefinition(taskDefinition.getName(), - TaskServiceUtils.createComposedTaskDefinition( - taskNode.toExecutableDSL())); - taskDeploymentProperties = TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, taskNode); - TaskServiceUtils.addImagePullSecretProperty(taskDeploymentProperties, - this.composedTaskRunnerConfigurationProperties); + taskDefinition = new TaskDefinition(taskDefinition.getName(), TaskServiceUtils.createComposedTaskDefinition(taskNode.toExecutableDSL())); + Map establishedComposedTaskProperties = TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, taskNode); + taskDeploymentProperties.putAll(establishedComposedTaskProperties); + TaskServiceUtils.addImagePullSecretProperty(taskDeploymentProperties, this.composedTaskRunnerConfigurationProperties); try { - appRegistration = new AppRegistration(ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, - ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, - this.composedTaskRunnerConfigurationProperties))); - } - catch (URISyntaxException e) { + appRegistration = new AppRegistration( + ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, + ApplicationType.task, + new URI(TaskServiceUtils.getComposedTaskLauncherUri( + this.taskConfigurationProperties, + this.composedTaskRunnerConfigurationProperties) + ) + ); + } catch (URISyntaxException e) { throw new IllegalStateException("Invalid Compose Task Runner Resource", e); } - } - else { + Set appNames = taskExecutionInfoService.composedTaskChildNames(taskDefinition.getName()); + + logger.info("composedTask:dsl={}:appNames:{}", taskDefinition.getDslText(), appNames); + addPrefixProperties(schemaVersionTarget, "app.composed-task-runner.", taskDeploymentProperties); + addPrefixProperties(schemaVersionTarget, "app." + scheduleName + ".", taskDeploymentProperties); + for (String appName : appNames) { + List names = new ArrayList<>(Arrays.asList(StringUtils.delimitedListToStringArray(appName, ","))); + String registeredName = names.get(0); + String appId = registeredName; + if (names.size() > 1) { + appId = names.get(1); + } + String appVersion = taskDeploymentProperties.get("version." + taskAppName + "-" + appId + "." + appId); + if(!StringUtils.hasText(appVersion)) { + appVersion = taskDeploymentProperties.get("version." + taskAppName + "-" + appId); + } + if(!StringUtils.hasText(appVersion)) { + appVersion = taskDeploymentProperties.get("version." + appId); + } + SchemaVersionTarget appSchemaTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(registeredName, appVersion, taskDefinitionReader); + logger.debug("ctr:{}:registeredName={}, version={}, schemaTarget={}", names, registeredName, appVersion, appSchemaTarget.getName()); + taskDeploymentProperties.put("app.composed-task-runner.composed-task-app-properties.app." + scheduleName + "-" + appId + ".spring.cloud.task.tablePrefix", + appSchemaTarget.getTaskPrefix()); + taskDeploymentProperties.put("app.composed-task-runner.composed-task-app-properties.app." + appId + ".spring.cloud.task.tablePrefix", + appSchemaTarget.getTaskPrefix()); + taskDeploymentProperties.put("app." + scheduleName + "-" + appId + ".spring.batch.jdbc.table-prefix", appSchemaTarget.getBatchPrefix()); + taskDeploymentProperties.put("app." 
+ registeredName + ".spring.batch.jdbc.table-prefix", appSchemaTarget.getBatchPrefix()); + } + logger.debug("ctr:added:{}:{}", scheduleName, taskDeploymentProperties); + commandLineArgs = TaskServiceUtils.convertCommandLineArgsToCTRFormat(commandLineArgs); + } else { appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), ApplicationType.task); + addPrefixCommandLineArgs(schemaVersionTarget, "app." + taskDefinition.getRegisteredAppName() + ".", commandLineArgs); + addPrefixProperties(schemaVersionTarget, "app." + taskDefinition.getRegisteredAppName() + ".", taskDeploymentProperties); } + addDefaultDeployerProperties(platformType, schemaVersionTarget, taskDeploymentProperties); Assert.notNull(appRegistration, "Unknown task app: " + taskDefinition.getRegisteredAppName()); Resource metadataResource = this.registry.getAppMetadataResource(appRegistration); Launcher launcher = getTaskLauncher(platformName); @@ -222,8 +298,7 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t TaskServiceUtils.addDatabaseCredentials(this.taskConfigurationProperties.isUseKubernetesSecretsForDbCredentials(), launcher.getType())); Map appDeploymentProperties = new HashMap<>(commonApplicationProperties.getTask()); - appDeploymentProperties.putAll( - TaskServiceUtils.extractAppProperties(taskDefinition.getRegisteredAppName(), taskDeploymentProperties)); + appDeploymentProperties.putAll(TaskServiceUtils.extractAppProperties(taskDefinition.getRegisteredAppName(), taskDeploymentProperties)); // Merge the common properties defined via the spring.cloud.dataflow.common-properties.task-resource file. // Doesn't override existing properties! @@ -244,7 +319,12 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t taskDeploymentProperties = extractAndQualifySchedulerProperties(taskDeploymentProperties); deployerDeploymentProperties.putAll(taskDeploymentProperties); scheduleName = validateScheduleNameForPlatform(launcher.getType(), scheduleName); - ScheduleRequest scheduleRequest = new ScheduleRequest(revisedDefinition, deployerDeploymentProperties, commandLineArgs, scheduleName, getTaskResource(taskDefinitionName)); + ScheduleRequest scheduleRequest = new ScheduleRequest(revisedDefinition, + deployerDeploymentProperties, + commandLineArgs, + scheduleName, + getTaskResource(taskDefinitionName, version)); + launcher.getScheduler().schedule(scheduleRequest); this.auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, @@ -252,6 +332,73 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t launcher.getName()); } + private void addDefaultDeployerProperties( + String platformType, + SchemaVersionTarget schemaVersionTarget, + Map deploymentProperties + ) { + String bootVersion = schemaVersionTarget.getSchemaVersion().getBootVersion(); + switch (platformType) { + case TaskPlatformFactory.LOCAL_PLATFORM_TYPE: { + String javaHome = propertyResolver.getProperty("spring.cloud.dataflow.defaults.boot" + bootVersion + ".local.javaHomePath"); + if (StringUtils.hasText(javaHome)) { + String property = "spring.cloud.deployer.local.javaHomePath." 
+ bootVersion; + addProperty(property, javaHome, deploymentProperties); + } + break; + } + case TaskPlatformFactory.CLOUDFOUNDRY_PLATFORM_TYPE: { + String buildpack = propertyResolver.getProperty("spring.cloud.dataflow.defaults.boot" + bootVersion + ".cloudfoundry.buildpack"); + if (StringUtils.hasText(buildpack)) { + String property = "spring.cloud.deployer.cloudfoundry.buildpack"; + addProperty(property, buildpack, deploymentProperties); + } + String buildpacks = propertyResolver.getProperty("spring.cloud.dataflow.defaults.boot" + bootVersion + ".cloudfoundry.buildpacks"); + if (StringUtils.hasText(buildpacks)) { + String property = "spring.cloud.deployer.cloudfoundry.buildpacks"; + addProperty(property, buildpacks, deploymentProperties); + } + break; + } + } + } + + private static void addProperty(String property, String value, Map properties) { + if (properties.containsKey(property)) { + logger.debug("exists:{}={}", property, properties.get(property)); + } else { + logger.debug("adding:{}={}", property, value); + properties.put(property, value); + } + } + + private static void addPrefixProperties(SchemaVersionTarget schemaVersionTarget, String prefix, Map deploymentProperties) { + addProperty(prefix + "spring.cloud.task.initialize-enabled", "false", deploymentProperties); + addProperty(prefix + "spring.batch.jdbc.table-prefix", schemaVersionTarget.getBatchPrefix(), deploymentProperties); + addProperty(prefix + "spring.cloud.task.tablePrefix", schemaVersionTarget.getTaskPrefix(), deploymentProperties); + addProperty(prefix + "spring.cloud.task.schemaTarget", schemaVersionTarget.getName(), deploymentProperties); + addProperty(prefix + "spring.cloud.deployer.bootVersion", schemaVersionTarget.getSchemaVersion().getBootVersion(), deploymentProperties); + } + + private static void addPrefixCommandLineArgs(SchemaVersionTarget schemaVersionTarget, String prefix, List commandLineArgs) { + addCommandLine(prefix + "spring.cloud.task.initialize-enabled", "false", commandLineArgs); + addCommandLine(prefix + "spring.batch.jdbc.table-prefix", schemaVersionTarget.getBatchPrefix(), commandLineArgs); + addCommandLine(prefix + "spring.cloud.task.tablePrefix", schemaVersionTarget.getTaskPrefix(), commandLineArgs); + addCommandLine(prefix + "spring.cloud.task.schemaTarget", schemaVersionTarget.getName(), commandLineArgs); + addCommandLine(prefix + "spring.cloud.deployer.bootVersion", schemaVersionTarget.getSchemaVersion().getBootVersion(), commandLineArgs); + } + + private static void addCommandLine(String property, String value, List commandLineArgs) { + String argPrefix = "--" + property + "="; + if(commandLineArgs.stream().noneMatch(item -> item.startsWith(argPrefix))) { + String arg = argPrefix + value; + commandLineArgs.add(arg); + logger.debug("adding:{}", arg); + } else { + logger.debug("exists:{}", argPrefix); + } + } + private String validateScheduleNameForPlatform(String type, String scheduleName) { if (type.equals(TaskPlatformFactory.KUBERNETES_PLATFORM_TYPE)) { if (scheduleName.length() > MAX_SCHEDULE_NAME_LEN) { @@ -296,8 +443,7 @@ private Launcher getTaskLauncher(String platformName) { } if (platformName != null && launcherToUse == null) { throw new IllegalArgumentException(String.format("The platform %s does not support a scheduler service.", platformName)); - } - else if (platformName == null && launcherToUse == null) { + } else if (platformName == null && launcherToUse == null) { throw new IllegalStateException("Could not find a default scheduler."); } return launcherToUse; @@ -306,9 
+452,7 @@ else if (platformName == null && launcherToUse == null) { private List getLaunchers() { List launchers = new ArrayList<>(); for (TaskPlatform taskPlatform : this.taskPlatforms) { - for (Launcher launcher : taskPlatform.getLaunchers()) { - launchers.add(launcher); - } + launchers.addAll(taskPlatform.getLaunchers()); } return launchers; } @@ -409,7 +553,7 @@ public ScheduleInfo getSchedule(String scheduleName, String platformName) { .filter(scheduleInfo -> scheduleInfo.getScheduleName().equals(scheduleName)) .collect(Collectors.toList()); Assert.isTrue(!(result.size() > 1), "more than one schedule was returned for scheduleName, should only be one"); - return result.size() > 0 ? result.get(0) : null; + return !result.isEmpty() ? result.get(0) : null; } @Override @@ -417,8 +561,10 @@ public ScheduleInfo getSchedule(String scheduleName) { return getSchedule(scheduleName, null); } - private List limitScheduleInfoResultSize(List resultSet, - int schedulerLimitResultSize) { + private List limitScheduleInfoResultSize( + List resultSet, + int schedulerLimitResultSize + ) { if (resultSet.size() > schedulerLimitResultSize) { resultSet = resultSet.subList(0, schedulerLimitResultSize); } @@ -444,7 +590,7 @@ private static Map extractAndQualifySchedulerProperties(Map fromApp)); } - protected Resource getTaskResource(String taskDefinitionName) { + protected Resource getTaskResource(String taskDefinitionName, String version) { TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(taskDefinitionName) .orElseThrow(() -> new NoSuchTaskDefinitionException(taskDefinitionName)); AppRegistration appRegistration = null; @@ -454,15 +600,19 @@ protected Resource getTaskResource(String taskDefinitionName) { this.composedTaskRunnerConfigurationProperties); try { composedTaskUri = new URI(composedTaskLauncherUri); - } - catch (URISyntaxException e) { + } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid Composed Task Url: " + composedTaskLauncherUri); } appRegistration = new AppRegistration(ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, ApplicationType.task, composedTaskUri); - } - else { - appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), + } else { + if(version != null) { + appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), + ApplicationType.task, version); + } + else { + appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), ApplicationType.task); + } } Assert.notNull(appRegistration, "Unknown task app: " + taskDefinition.getRegisteredAppName()); return this.registry.getAppResource(appRegistration); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java index f97293c8d3..b18b85cc93 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
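The DefaultStreamService hunks below harden the SnakeYAML usage: the plain new Yaml(dumperOptions) becomes a SafeConstructor-backed instance. A minimal standalone sketch of that construction, assuming a SnakeYAML version whose SafeConstructor(LoaderOptions) and Representer(DumperOptions) constructors match the imports added in this diff; the dumped map content is illustrative:

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.yaml.snakeyaml.DumperOptions;
    import org.yaml.snakeyaml.LoaderOptions;
    import org.yaml.snakeyaml.Yaml;
    import org.yaml.snakeyaml.constructor.SafeConstructor;
    import org.yaml.snakeyaml.representer.Representer;

    class SafeYamlSketch {
        public static void main(String[] args) {
            DumperOptions dumperOptions = new DumperOptions();
            dumperOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
            dumperOptions.setPrettyFlow(true);
            // SafeConstructor only materializes standard types (maps, lists, scalars),
            // so a YAML tag can no longer instantiate an arbitrary class on load.
            Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()), new Representer(dumperOptions), dumperOptions);
            Map<String, Object> skipperConfigValuesMap = new LinkedHashMap<>();
            skipperConfigValuesMap.put("time", Map.of("spec", Map.of("version", "3.2.1")));
            System.out.println(yaml.dump(skipperConfigValuesMap));
        }
    }

The dump path behaves as before; the gain is on load, where untrusted YAML is now limited to plain data.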
@@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.springframework.cloud.dataflow.server.service.impl; import java.util.ArrayList; @@ -29,7 +30,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.yaml.snakeyaml.DumperOptions; +import org.yaml.snakeyaml.LoaderOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; +import org.yaml.snakeyaml.representer.Representer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.audit.service.AuditServiceUtils; @@ -84,6 +88,7 @@ * @author Christian Tzolov * @author Gunnar Hillert * @author Chris Schaefer + * @author Chris Bono */ @Transactional public class DefaultStreamService implements StreamService { @@ -238,20 +243,19 @@ private void updateStreamDefinitionFromReleaseManifest(String streamName, String streamDefinition.getOriginalDslText(), streamDefinition.getDescription()); logger.debug("Updated StreamDefinition: " + updatedStreamDefinition); - // TODO consider adding an explicit UPDATE method to the streamDefRepository - // Note: Not transactional and can lead to loosing the stream definition + // NOTE: Not transactional and can lead to losing the stream definition this.streamDefinitionRepository.delete(updatedStreamDefinition); this.streamDefinitionRepository.save(updatedStreamDefinition); this.auditRecordService.populateAndSaveAuditRecord( AuditOperationType.STREAM, AuditActionType.UPDATE, streamName, - updatedStreamDefinition.getDslText(), null); + this.streamDefinitionService.redactDsl(updatedStreamDefinition), null); } @Override public void scaleApplicationInstances(String streamName, String appName, int count, Map properties) { // Skipper expects app names / labels not deployment ids - logger.info(String.format("Scale %s:%s to %s with properties: %s", streamName, appName, count, properties)); + logger.info("Scale {}:{} to {} with properties: {}", streamName, appName, count, properties); this.skipperStreamDeployer.scale(streamName, appName, count, properties); } @@ -340,7 +344,7 @@ String convertPropertiesToSkipperYaml(StreamDefinition streamDefinition, if (hasProps) { appMap.put(SpringCloudDeployerApplicationManifest.SPEC_STRING, specMap); } - if (appMap.size() != 0) { + if (!appMap.isEmpty()) { skipperConfigValuesMap.put(appName, appMap); } } @@ -349,7 +353,7 @@ String convertPropertiesToSkipperYaml(StreamDefinition streamDefinition, dumperOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); dumperOptions.setPrettyFlow(true); dumperOptions.setLineBreak(DumperOptions.LineBreak.getPlatformLineBreak()); - Yaml yaml = new Yaml(dumperOptions); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()), new Representer(dumperOptions), dumperOptions); return yaml.dump(skipperConfigValuesMap); } else { @@ -389,19 +393,9 @@ public StreamDeployment info(String streamName) { return this.skipperStreamDeployer.getStreamInfo(streamName); } - /** - * Create a new stream. 
- * - * @param streamName stream name - * @param dsl DSL definition for stream - * @param description description of the stream definition - * @param deploy if {@code true}, the stream is deployed upon creation (default is - * {@code false}) - * @return the created stream definition already exists - * @throws InvalidStreamDefinitionException if there are errors in parsing the stream DSL, - * resolving the name, or type of applications in the stream - */ - public StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy) { + @Override + public StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy, + Map deploymentProperties) { StreamDefinition streamDefinition = createStreamDefinition(streamName, dsl, description); List errorMessages = new ArrayList<>(); @@ -428,7 +422,7 @@ public StreamDefinition createStream(String streamName, String dsl, String descr final StreamDefinition savedStreamDefinition = this.streamDefinitionRepository.save(streamDefinition); if (deploy) { - this.deployStream(streamName, new HashMap<>()); + this.deployStream(streamName, deploymentProperties); } auditRecordService.populateAndSaveAuditRecord( @@ -563,7 +557,7 @@ private Set findRelatedDefinitions(StreamDefinition currentStr */ public Page findDefinitionByNameContains(Pageable pageable, String search) { Page streamDefinitions; - if (search != null) { + if (StringUtils.hasLength(search)) { streamDefinitions = streamDefinitionRepository.findByNameContains(search, pageable); } else { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java index bdce967c6b..8059e31c30 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
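The DefaultTaskDeleteService changes below route every delete through per-schema-target DAOs and, when a chunk size applies, batch the id sets so the generated SQL IN clauses stay under database limits (see the split helper and getTaskExecutionDeleteChunkSize further down). A self-contained sketch of that partitioning step; the helper mirrors the one in this diff with its stripped generics restored, and the chunk size of 2 is chosen only for readable demo output:

    import java.util.Collection;
    import java.util.List;
    import java.util.Set;
    import java.util.TreeSet;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.stream.Collectors;

    class ChunkedDeleteSketch {
        // Group elements by running-index / max to form fixed-size chunks,
        // as DefaultTaskDeleteService.split(..) does.
        static <T> Collection<List<T>> split(Collection<T> input, int max) {
            final AtomicInteger count = new AtomicInteger(0);
            return input.stream()
                    .collect(Collectors.groupingBy(s -> count.getAndIncrement() / max))
                    .values();
        }

        public static void main(String[] args) {
            Set<Long> taskExecutionIds = new TreeSet<>(List.of(1L, 2L, 3L, 4L, 5L));
            split(taskExecutionIds, 2).forEach(chunk ->
                    System.out.println("DELETE FROM TASK_EXECUTION WHERE TASK_EXECUTION_ID IN " + chunk));
        }
    }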
@@ -18,7 +18,6 @@ import java.util.Collection; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -26,6 +25,7 @@ import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -34,31 +34,35 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.job.LauncherRepository; -import org.springframework.cloud.dataflow.server.repository.CannotDeleteNonParentTaskExecutionException; import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDaoContainer; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; -import org.springframework.cloud.dataflow.server.repository.support.DatabaseType; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.deployer.spi.task.TaskLauncher; -import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.orm.ObjectOptimisticLockingFailureException; import org.springframework.transaction.annotation.Transactional; @@ -80,6 +84,8 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis + * @author Joe O'Brien */ public class DefaultTaskDeleteService implements TaskDeleteService { @@ -92,7 +98,7 @@ public class DefaultTaskDeleteService 
implements TaskDeleteService { /** * Used to read TaskExecutions. */ - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; private final LauncherRepository launcherRepository; @@ -102,38 +108,44 @@ public class DefaultTaskDeleteService implements TaskDeleteService { protected final AuditRecordService auditRecordService; - protected final DataflowTaskExecutionDao dataflowTaskExecutionDao; + protected final DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer; - protected final DataflowJobExecutionDao dataflowJobExecutionDao; + protected final DataflowJobExecutionDaoContainer dataflowJobExecutionDaoContainer; - protected final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; + protected final DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer; - private SchedulerService schedulerService; + private final SchedulerService schedulerService; private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); - private int taskDeleteChunkSize; + private final SchemaService schemaService; - private DataSource dataSource; + private final int taskDeleteChunkSize; - public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository launcherRepository, + private final DataSource dataSource; + + public DefaultTaskDeleteService( + AggregateTaskExplorer taskExplorer, + LauncherRepository launcherRepository, TaskDefinitionRepository taskDefinitionRepository, TaskDeploymentRepository taskDeploymentRepository, AuditRecordService auditRecordService, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowJobExecutionDao dataflowJobExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowJobExecutionDaoContainer dataflowJobExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, SchedulerService schedulerService, + SchemaService schemaService, TaskConfigurationProperties taskConfigurationProperties, - DataSource dataSource) { + DataSource dataSource + ) { Assert.notNull(taskExplorer, "TaskExplorer must not be null"); Assert.notNull(launcherRepository, "LauncherRepository must not be null"); Assert.notNull(taskDefinitionRepository, "TaskDefinitionRepository must not be null"); Assert.notNull(taskDeploymentRepository, "TaskDeploymentRepository must not be null"); Assert.notNull(auditRecordService, "AuditRecordService must not be null"); - Assert.notNull(dataflowTaskExecutionDao, "DataflowTaskExecutionDao must not be null"); - Assert.notNull(dataflowJobExecutionDao, "DataflowJobExecutionDao must not be null"); - Assert.notNull(dataflowTaskExecutionMetadataDao, "DataflowTaskExecutionMetadataDao must not be null"); + Assert.notNull(dataflowTaskExecutionDaoContainer, "DataflowTaskExecutionDaoContainer must not be null"); + Assert.notNull(dataflowJobExecutionDaoContainer, "DataflowJobExecutionDaoContainer must not be null"); + Assert.notNull(dataflowTaskExecutionMetadataDaoContainer, "DataflowTaskExecutionMetadataDaoContainer must not be null"); Assert.notNull(taskConfigurationProperties, "TaskConfigurationProperties must not be null"); Assert.notNull(dataSource, "DataSource must not be null"); @@ -142,103 +154,197 @@ public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository la this.taskDefinitionRepository = taskDefinitionRepository; this.taskDeploymentRepository = taskDeploymentRepository; 
this.auditRecordService = auditRecordService; - this.dataflowTaskExecutionDao = dataflowTaskExecutionDao; - this.dataflowJobExecutionDao = dataflowJobExecutionDao; - this.dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDao; + this.dataflowTaskExecutionDaoContainer = dataflowTaskExecutionDaoContainer; + this.dataflowJobExecutionDaoContainer = dataflowJobExecutionDaoContainer; + this.dataflowTaskExecutionMetadataDaoContainer = dataflowTaskExecutionMetadataDaoContainer; this.schedulerService = schedulerService; + this.schemaService = schemaService; this.taskDeleteChunkSize = taskConfigurationProperties.getExecutionDeleteChunkSize(); this.dataSource = dataSource; } @Override - public void cleanupExecution(long id) { - TaskExecution taskExecution = taskExplorer.getTaskExecution(id); + @Transactional + public void cleanupExecution(long id, String schemaTarget) { + performCleanupExecution(id, schemaTarget); + } + + private void performCleanupExecution(long id, String schemaTarget) { + AggregateTaskExecution taskExecution = taskExplorer.getTaskExecution(id, schemaTarget); Assert.notNull(taskExecution, "There was no task execution with id " + id); String launchId = taskExecution.getExternalExecutionId(); if (!StringUtils.hasText(launchId)) { - logger.warn(String.format("Did not find External execution ID for taskName = [%s], taskId = [%s]. Nothing to clean up.", - taskExecution.getTaskName(), id)); + logger.warn("Did not find External execution ID for taskName = [{}], taskId = [{}]. Nothing to clean up.", taskExecution.getTaskName(), id); return; } TaskDeployment taskDeployment = this.taskDeploymentRepository.findByTaskDeploymentId(launchId); if (taskDeployment == null) { - logger.warn(String.format("Did not find TaskDeployment for taskName = [%s], taskId = [%s]. Nothing to clean up.", - taskExecution.getTaskName(), id)); + logger.warn("Did not find TaskDeployment for taskName = [{}], taskId = [{}]. Nothing to clean up.", taskExecution.getTaskName(), id); return; } Launcher launcher = launcherRepository.findByName(taskDeployment.getPlatformName()); if (launcher != null) { TaskLauncher taskLauncher = launcher.getTaskLauncher(); taskLauncher.cleanup(launchId); - } - else { - logger.info( - "Could clean up execution for task id " + id + ". Did not find a task platform named " + - taskDeployment.getPlatformName()); + } else { + logger.info("Could not clean up execution for task id " + id + ". 
Did not find a task platform named " + taskDeployment.getPlatformName()); } } @Override - public void cleanupExecutions(Set actionsAsSet, Set ids) { - final SortedSet nonExistingTaskExecutions = new TreeSet<>(); - final SortedSet nonParentTaskExecutions = new TreeSet<>(); - final SortedSet deletableTaskExecutions = new TreeSet<>(); + @Transactional + public void cleanupExecutions(Set actionsAsSet, String taskName, boolean completed) { + cleanupExecutions(actionsAsSet, taskName, completed, null); + } - for (Long id : ids) { - final TaskExecution taskExecution = this.taskExplorer.getTaskExecution(id); - if (taskExecution == null) { - nonExistingTaskExecutions.add(id); + @Override + @Transactional + public void cleanupExecutions(Set actionsAsSet, String taskName, boolean completed, Integer days) { + List tasks; + if (days != null) { + tasks = this.taskExplorer.findTaskExecutionsBeforeEndTime(taskName, TaskServicesDateUtils.numDaysAgoFromLocalMidnightToday(days)); + } else { + tasks = this.taskExplorer.findTaskExecutions(taskName, completed); + } + final Set parentExecutions = new HashSet<>(); + final Set childExecutions = new HashSet<>(); + boolean removeData = actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA); + boolean cleanUp = actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP); + for (AggregateTaskExecution taskExecution : tasks) { + if (taskExecution.getParentExecutionId() == null) { + parentExecutions.add(taskExecution); + } else { + childExecutions.add(taskExecution); + } + } + if (cleanUp) { + for (AggregateTaskExecution taskExecution : tasks) { + this.performCleanupExecution(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); } - else { - final Long parentExecutionId = taskExecution.getParentExecutionId(); + } - if (parentExecutionId != null) { - nonParentTaskExecutions.add(parentExecutionId); - } - else { - deletableTaskExecutions.add(taskExecution.getExecutionId()); + if (removeData) { + if (!childExecutions.isEmpty()) { + deleteTaskExecutions(childExecutions); + } + if (!parentExecutions.isEmpty()) { + Map> parents = parentExecutions.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String schemaTarget : parents.keySet()) { + SortedSet parentIds = parents.get(schemaTarget) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + Map> children = this.taskExplorer.findChildTaskExecutions(parentIds, schemaTarget) + .stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String group : children.keySet()) { + SortedSet childIds = children.get(group) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + this.performDeleteTaskExecutions(childIds, group); + + } + this.performDeleteTaskExecutions(parentIds, schemaTarget); } } } + } + + private void deleteTaskExecutions(Collection taskExecutions) { + Map> executions = taskExecutions.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String schemaTarget : executions.keySet()) { + SortedSet executionIds = executions.get(schemaTarget) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + this.performDeleteTaskExecutions(executionIds, schemaTarget); + } + } + + @Override + @Transactional + public void cleanupExecutions(Set actionsAsSet, Set ids, String schemaTarget) { + performCleanupExecutions(actionsAsSet, 
ids, schemaTarget); + } + private void performCleanupExecutions(Set actionsAsSet, Set ids, String schemaTarget) { + final SortedSet nonExistingTaskExecutions = new TreeSet<>(); + final SortedSet parentExecutions = new TreeSet<>(); + final SortedSet childExecutions = new TreeSet<>(); + boolean removeData = actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA); + boolean cleanUp = actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP); + for (Long id : ids) { + final AggregateTaskExecution taskExecution = this.taskExplorer.getTaskExecution(id, schemaTarget); + if (taskExecution == null) { + nonExistingTaskExecutions.add(id); + } else if (taskExecution.getParentExecutionId() == null) { + parentExecutions.add(taskExecution.getExecutionId()); + } else { + childExecutions.add(taskExecution.getExecutionId()); + } + } if (!nonExistingTaskExecutions.isEmpty()) { if (nonExistingTaskExecutions.size() == 1) { - throw new NoSuchTaskExecutionException(nonExistingTaskExecutions.first()); - } - else { - throw new NoSuchTaskExecutionException(nonExistingTaskExecutions); + throw new NoSuchTaskExecutionException(nonExistingTaskExecutions.first(), schemaTarget); + } else { + throw new NoSuchTaskExecutionException(nonExistingTaskExecutions, schemaTarget); } } - if (actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP)) { + if (cleanUp) { for (Long id : ids) { - this.cleanupExecution(id); + this.performCleanupExecution(id, schemaTarget); } } - if (actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA)) { - if (!deletableTaskExecutions.isEmpty()) { - this.deleteTaskExecutions(deletableTaskExecutions); - } - // delete orphaned child execution ids - else if (deletableTaskExecutions.isEmpty() && !nonParentTaskExecutions.isEmpty()) { - this.deleteTaskExecutions(nonParentTaskExecutions); + + if (removeData) { + if (!childExecutions.isEmpty()) { + this.performDeleteTaskExecutions(childExecutions, schemaTarget); } - else if (!nonParentTaskExecutions.isEmpty()) { - throw new CannotDeleteNonParentTaskExecutionException(nonParentTaskExecutions); + if (!parentExecutions.isEmpty()) { + List children = this.taskExplorer.findChildTaskExecutions(parentExecutions, schemaTarget); + if (!children.isEmpty()) { + this.deleteTaskExecutions(children); + } + this.performDeleteTaskExecutions(parentExecutions, schemaTarget); } } - } @Override @Transactional - public void deleteTaskExecutions(Set taskExecutionIds) { + public void deleteTaskExecutions(Set taskExecutionIds, String schemaTarget) { + performDeleteTaskExecutions(taskExecutionIds, schemaTarget); + } + + @Override + public void deleteTaskExecutions(String taskName, boolean onlyCompleted) { + Map> tasks = this.taskExplorer.findTaskExecutions(taskName, onlyCompleted) + .stream().collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String schemaTarget : tasks.keySet()) { + Set executionIds = tasks.get(schemaTarget) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toSet()); + performDeleteTaskExecutions(executionIds, schemaTarget); + } + } + + private void performDeleteTaskExecutions(Set taskExecutionIds, String schemaTarget) { + logger.info("performDeleteTaskExecutions:{}:{}", schemaTarget, taskExecutionIds); Assert.notEmpty(taskExecutionIds, "You must provide at least 1 task execution id."); - final Set taskExecutionIdsWithChildren = new HashSet<>(taskExecutionIds); + final DataflowTaskExecutionDao dataflowTaskExecutionDao = 
dataflowTaskExecutionDaoContainer.get(schemaTarget); + final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaTarget); + final Set taskExecutionIdsWithChildren = new HashSet<>(taskExecutionIds); final Set childTaskExecutionIds = dataflowTaskExecutionDao.findChildTaskExecutionIds(taskExecutionIds); - logger.info("Found {} child task execution ids: {}.", childTaskExecutionIds.size(), StringUtils.collectionToCommaDelimitedString(childTaskExecutionIds)); + logger.info("Found {} child task execution ids: {}.", + childTaskExecutionIds.size(), + StringUtils.collectionToCommaDelimitedString(childTaskExecutionIds)); taskExecutionIdsWithChildren.addAll(childTaskExecutionIds); final Map auditData = new LinkedHashMap<>(); @@ -251,7 +357,7 @@ public void deleteTaskExecutions(Set taskExecutionIds) { final Set jobExecutionIds = new HashSet<>(); for (Long taskExecutionId : taskExecutionIdsWithChildren) { - jobExecutionIds.addAll(taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecutionId)); + jobExecutionIds.addAll(taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecutionId, schemaTarget)); } logger.info("There are {} associated job executions.", jobExecutionIds.size()); @@ -260,45 +366,10 @@ public void deleteTaskExecutions(Set taskExecutionIds) { auditData.put("Deleted # of Job Executions", jobExecutionIds.size()); auditData.put("Deleted Job Execution IDs", StringUtils.collectionToDelimitedString(jobExecutionIds, ", ")); - if (!jobExecutionIds.isEmpty()) { - final Set stepExecutionIds = dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIds); - - final int numberOfDeletedBatchStepExecutionContextRows; - if (!stepExecutionIds.isEmpty()) { - numberOfDeletedBatchStepExecutionContextRows = dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds); - } - else { - numberOfDeletedBatchStepExecutionContextRows = 0; - } + int chunkSize = getTaskExecutionDeleteChunkSize(this.dataSource); - final int numberOfDeletedBatchStepExecutionRows = dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedBatchJobExecutionContextRows = dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedBatchJobExecutionParamRows = dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedBatchJobExecutionRows = dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedUnusedBatchJobInstanceRows = dataflowJobExecutionDao.deleteUnusedBatchJobInstances(); - - logger.info("Deleted the following Batch Job Execution related data for {} Job Executions.\n" + - "Batch Step Execution Context Rows: {}\n" + - "Batch Step Executions Rows: {}\n" + - "Batch Job Execution Context Rows: {}\n" + - "Batch Job Execution Param Rows: {}\n" + - "Batch Job Execution Rows: {}\n" + - "Batch Job Instance Rows: {}.", - jobExecutionIds.size(), - numberOfDeletedBatchStepExecutionContextRows, - numberOfDeletedBatchStepExecutionRows, - numberOfDeletedBatchJobExecutionContextRows, - numberOfDeletedBatchJobExecutionParamRows, - numberOfDeletedBatchJobExecutionRows, - numberOfDeletedUnusedBatchJobInstanceRows - ); - - auditData.put("Batch Step Execution Context", numberOfDeletedBatchStepExecutionContextRows); - auditData.put("Batch Step Executions", numberOfDeletedBatchStepExecutionRows); - auditData.put("Batch Job Execution 
Context Rows", numberOfDeletedBatchJobExecutionContextRows); - auditData.put("Batch Job Execution Params", numberOfDeletedBatchJobExecutionParamRows); - auditData.put("Batch Job Executions", numberOfDeletedBatchJobExecutionRows); - auditData.put("Batch Job Instance Rows", numberOfDeletedUnusedBatchJobInstanceRows); + if (!jobExecutionIds.isEmpty()) { + deleteRelatedJobAndStepExecutions(jobExecutionIds, auditData, chunkSize, schemaTarget); } // Delete Task Related Data @@ -306,45 +377,141 @@ public void deleteTaskExecutions(Set taskExecutionIds) { auditData.put("Deleted # of Task Executions", taskExecutionIdsWithChildren.size()); auditData.put("Deleted Task Execution IDs", StringUtils.collectionToDelimitedString(taskExecutionIdsWithChildren, ", ")); - final AtomicInteger numberOfDeletedTaskExecutionParamRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedTaskTaskBatchRelationshipRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedTaskManifestRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedTaskExecutionRows = new AtomicInteger(0); - - int chunkSize = getTaskExecutionDeleteChunkSize(this.dataSource); - if(chunkSize <= 0) { - numberOfDeletedTaskExecutionParamRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdsWithChildren)); - numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds(taskExecutionIdsWithChildren)); - numberOfDeletedTaskManifestRows.addAndGet(this.dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdsWithChildren)); - numberOfDeletedTaskExecutionRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdsWithChildren)); - } - else { - split(taskExecutionIdsWithChildren, chunkSize).stream().forEach( taskExecutionIdSubsetList -> { + final AtomicInteger numberOfDeletedTaskExecutionParamRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskTaskBatchRelationshipRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskManifestRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskExecutionRows = new AtomicInteger(0); + + if (chunkSize <= 0) { + numberOfDeletedTaskExecutionParamRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdsWithChildren)); + numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds( + taskExecutionIdsWithChildren)); + numberOfDeletedTaskManifestRows.addAndGet(dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdsWithChildren)); + numberOfDeletedTaskExecutionRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdsWithChildren)); + } else { + split(taskExecutionIdsWithChildren, chunkSize).forEach(taskExecutionIdSubsetList -> { Set taskExecutionIdSubset = new HashSet<>(taskExecutionIdSubsetList); - numberOfDeletedTaskExecutionParamRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdSubset)); - numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds(taskExecutionIdSubset)); - numberOfDeletedTaskManifestRows.addAndGet(this.dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdSubset)); - 
numberOfDeletedTaskExecutionRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskExecutionParamRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds( + taskExecutionIdSubset)); + numberOfDeletedTaskManifestRows.addAndGet(dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskExecutionRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdSubset)); }); } logger.info("Deleted the following Task Execution related data for {} Task Executions:\n" + - "Task Execution Param Rows: {}\n" + - "Task Batch Relationship Rows: {}\n" + - "Task Manifest Rows: {}\n" + - "Task Execution Rows: {}.", + "Task Execution Param Rows: {}\n" + + "Task Batch Relationship Rows: {}\n" + + "Task Manifest Rows: {}\n" + + "Task Execution Rows: {}.", taskExecutionIdsWithChildren.size(), numberOfDeletedTaskExecutionParamRows, numberOfDeletedTaskTaskBatchRelationshipRows, numberOfDeletedTaskManifestRows, numberOfDeletedTaskExecutionRows - ); + ); // Populate Audit Record - auditRecordService.populateAndSaveAuditRecordUsingMapData( - AuditOperationType.TASK, AuditActionType.DELETE, - taskExecutionIdsWithChildren.size() + " Task Execution Delete(s)", auditData, null); + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.TASK, + AuditActionType.DELETE, + taskExecutionIdsWithChildren.size() + " Task Execution Delete(s)", + auditData, + null); + } + + private void deleteRelatedJobAndStepExecutions(Set jobExecutionIds, Map auditData, int chunkSize, String schemaTarget) { + + final Set stepExecutionIds = findStepExecutionIds(jobExecutionIds, chunkSize, schemaTarget); + + final AtomicInteger numberOfDeletedBatchStepExecutionContextRows = new AtomicInteger(0); + if (!stepExecutionIds.isEmpty()) { + deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds, chunkSize, numberOfDeletedBatchStepExecutionContextRows, schemaTarget); + } + deleteStepAndJobExecutionsByJobExecutionId(jobExecutionIds, chunkSize, auditData, numberOfDeletedBatchStepExecutionContextRows, schemaTarget); + + } + + private Set findStepExecutionIds(Set jobExecutionIds, int chunkSize, String schemaTarget) { + final Set stepExecutionIds = ConcurrentHashMap.newKeySet(); + DataflowJobExecutionDao dataflowJobExecutionDao = dataflowJobExecutionDaoContainer.get(schemaTarget); + if (chunkSize <= 0) { + stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIds)); + } else { + split(jobExecutionIds, chunkSize).forEach(jobExecutionIdSubsetList -> { + Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); + stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIdSubset)); + }); + } + + return stepExecutionIds; + } + + private void deleteBatchStepExecutionContextByStepExecutionIds( + Set stepExecutionIds, + int chunkSize, + AtomicInteger numberOfDeletedBatchStepExecutionContextRows, + String schemaTarget + ) { + final DataflowJobExecutionDao dataflowJobExecutionDao = dataflowJobExecutionDaoContainer.get(schemaTarget); + if (chunkSize <= 0) { + numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds)); + + } else { + 
split(stepExecutionIds, chunkSize).forEach(stepExecutionIdSubsetList -> { + Set stepExecutionIdSubset = new HashSet<>(stepExecutionIdSubsetList); + numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds( + stepExecutionIdSubset)); + }); + } + } + + private void deleteStepAndJobExecutionsByJobExecutionId( + Set jobExecutionIds, + int chunkSize, + Map auditData, + AtomicInteger numberOfDeletedBatchStepExecutionContextRows, + String schemaTarget + ) { + DataflowJobExecutionDao dataflowJobExecutionDao = dataflowJobExecutionDaoContainer.get(schemaTarget); + final AtomicInteger numberOfDeletedBatchStepExecutionRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionContextRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionParamRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionRows = new AtomicInteger(0); + + if (chunkSize <= 0) { + numberOfDeletedBatchStepExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionParamRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIds)); + } else { + split(jobExecutionIds, chunkSize).forEach(jobExecutionIdSubsetList -> { + Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); + numberOfDeletedBatchStepExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds( + jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionParamRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIdSubset)); + }); + } + + final int numberOfDeletedUnusedBatchJobInstanceRows = dataflowJobExecutionDao.deleteUnusedBatchJobInstances(); + + logger.info("Deleted the following Batch Job Execution related data for {} Job Executions.\n" + "Batch Step Execution Context Rows: {}\n" + "Batch Step Executions Rows: {}\n" + "Batch Job Execution Context Rows: {}\n" + "Batch Job Execution Param Rows: {}\n" + "Batch Job Execution Rows: {}\n" + "Batch Job Instance Rows: {}.", + jobExecutionIds.size(), + numberOfDeletedBatchStepExecutionContextRows, + numberOfDeletedBatchStepExecutionRows, + numberOfDeletedBatchJobExecutionContextRows, + numberOfDeletedBatchJobExecutionParamRows, + numberOfDeletedBatchJobExecutionRows, + numberOfDeletedUnusedBatchJobInstanceRows); + + auditData.put("Batch Step Execution Context", numberOfDeletedBatchStepExecutionContextRows); + auditData.put("Batch Step Executions", numberOfDeletedBatchStepExecutionRows); + auditData.put("Batch Job Execution Context Rows", numberOfDeletedBatchJobExecutionContextRows); + auditData.put("Batch Job Execution Params", numberOfDeletedBatchJobExecutionParamRows); + auditData.put("Batch Job Executions", numberOfDeletedBatchJobExecutionRows); + auditData.put("Batch Job Instance Rows", 
numberOfDeletedUnusedBatchJobInstanceRows); } /** @@ -352,12 +519,13 @@ public void deleteTaskExecutions(Set taskExecutionIds) { * greater than zero this overrides the chunk size for the specific database type. * If the database type has no fixed number of maximum elements allowed in the {@code IN} clause * then zero is returned. + * * @param dataSource the datasource used by data flow. * @return the chunk size to be used for deleting task executions. */ private int getTaskExecutionDeleteChunkSize(DataSource dataSource) { int result = this.taskDeleteChunkSize; - if(this.taskDeleteChunkSize < 1) { + if (this.taskDeleteChunkSize < 1) { try { DatabaseType databaseType = DatabaseType.fromMetaData(dataSource); String name = databaseType.name(); @@ -367,8 +535,7 @@ private int getTaskExecutionDeleteChunkSize(DataSource dataSource) { if (name.startsWith("ORACLE")) { result = ORACLE_SERVER_CHUNK_SIZE; } - } - catch (MetaDataAccessException mdae) { + } catch (MetaDataAccessException mdae) { logger.warn("Unable to retrieve metadata for database when deleting task executions", mdae); } } @@ -377,32 +544,34 @@ private int getTaskExecutionDeleteChunkSize(DataSource dataSource) { static Collection> split(Collection input, int max) { final AtomicInteger count = new AtomicInteger(0); - return input.stream() - .collect(Collectors.groupingBy(s -> count.getAndIncrement() / max)) - .values(); + return input.stream().collect(Collectors.groupingBy(s -> count.getAndIncrement() / max)).values(); } @Override public void deleteTaskDefinition(String name) { - TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(name) - .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); + TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(name).orElseThrow(() -> new NoSuchTaskDefinitionException(name)); deleteTaskDefinition(taskDefinition); - auditRecordService.populateAndSaveAuditRecord( - AuditOperationType.TASK, AuditActionType.DELETE, - taskDefinition.getTaskName(), this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), null); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.TASK, + AuditActionType.DELETE, + taskDefinition.getTaskName(), + this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), + null); } @Override public void deleteTaskDefinition(String name, boolean cleanup) { if (cleanup) { - Set taskExecutionIds = this.dataflowTaskExecutionDao.getTaskExecutionIdsByTaskName(name); - final Set actionsAsSet = new HashSet<>(); - actionsAsSet.add(TaskExecutionControllerDeleteAction.CLEANUP); - actionsAsSet.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); - if (!taskExecutionIds.isEmpty()) { - cleanupExecutions(actionsAsSet, taskExecutionIds); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowTaskExecutionDao dataflowTaskExecutionDao = dataflowTaskExecutionDaoContainer.get(target.getName()); + Set taskExecutionIds = dataflowTaskExecutionDao.getTaskExecutionIdsByTaskName(name); + final Set actionsAsSet = new HashSet<>(); + actionsAsSet.add(TaskExecutionControllerDeleteAction.CLEANUP); + actionsAsSet.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); + if (!taskExecutionIds.isEmpty()) { + performCleanupExecutions(actionsAsSet, taskExecutionIds, target.getName()); + } } } this.deleteTaskDefinition(name); @@ -415,9 +584,11 @@ public void deleteAll() { for (TaskDefinition taskDefinition : allTaskDefinition) { deleteTaskDefinition(taskDefinition); - auditRecordService.populateAndSaveAuditRecord( - AuditOperationType.TASK, 
AuditActionType.DELETE, - taskDefinition.getTaskName(), this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), null); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.TASK, + AuditActionType.DELETE, + taskDefinition.getTaskName(), + this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), + null); } } @@ -438,8 +609,7 @@ private void deleteTaskDefinition(TaskDefinition taskDefinition) { } try { destroyChildTask(childTaskPrefix + childName); - } - catch (ObjectOptimisticLockingFailureException e) { + } catch (ObjectOptimisticLockingFailureException e) { logger.warn("Attempted delete on a child task that is currently being deleted"); } }); @@ -447,14 +617,13 @@ private void deleteTaskDefinition(TaskDefinition taskDefinition) { // destroy normal task or composed parent task try { destroyPrimaryTask(taskDefinition.getTaskName()); - } catch (ObjectOptimisticLockingFailureException e) { - logger.warn(String.format("Attempted delete on task %s that is currently being deleted", taskDefinition.getTaskName())); + } catch (ObjectOptimisticLockingFailureException e) { + logger.warn("Attempted delete on task {} that is currently being deleted", taskDefinition.getTaskName()); } } private void destroyPrimaryTask(String name) { - TaskDefinition taskDefinition = taskDefinitionRepository.findById(name) - .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); + TaskDefinition taskDefinition = taskDefinitionRepository.findById(name).orElseThrow(() -> new NoSuchTaskDefinitionException(name)); destroyTask(taskDefinition); } @@ -465,36 +634,29 @@ private void destroyChildTask(String name) { private void destroyTask(TaskDefinition taskDefinition) { taskDefinitionRepository.deleteById(taskDefinition.getName()); - TaskDeployment taskDeployment = - this.taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskDefinition.getTaskName()); + TaskDeployment taskDeployment = this.taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskDefinition.getTaskName()); if (taskDeployment != null) { Launcher launcher = launcherRepository.findByName(taskDeployment.getPlatformName()); if (launcher != null) { TaskLauncher taskLauncher = launcher.getTaskLauncher(); taskLauncher.destroy(taskDefinition.getName()); } - } - else { - if(!findAndDeleteTaskResourcesAcrossPlatforms(taskDefinition)) { - logger.info("TaskLauncher.destroy not invoked for task " + - taskDefinition.getTaskName() + ". Did not find a previously launched task to destroy."); + } else { + if (!findAndDeleteTaskResourcesAcrossPlatforms(taskDefinition)) { + logger.info("TaskLauncher.destroy not invoked for task " + taskDefinition.getTaskName() + ". 
Did not find a previously launched task to destroy."); } } } private boolean findAndDeleteTaskResourcesAcrossPlatforms(TaskDefinition taskDefinition) { boolean result = false; - Iterable launchers = launcherRepository.findAll(); - Iterator launcherIterator = launchers.iterator(); - while(launcherIterator.hasNext()) { - Launcher launcher = launcherIterator.next(); + for (Launcher launcher : launcherRepository.findAll()) { try { launcher.getTaskLauncher().destroy(taskDefinition.getName()); - logger.info(String.format("Deleted task app resources for %s in platform %s", taskDefinition.getName(), launcher.getName())); + logger.info("Deleted task app resources for {} in platform {}", taskDefinition.getName(), launcher.getName()); result = true; - } - catch (Exception ex) { - logger.info(String.format("Attempted delete of app resources for %s but none found on platform %s.", taskDefinition.getName(), launcher.getName())); + } catch (Exception ex) { + logger.info("Attempted delete of app resources for {} but none found on platform {}.", taskDefinition.getName(), launcher.getName()); } } return result; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java index 809c0cacc8..8cb83fbc7e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java @@ -19,10 +19,16 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.core.AllPlatformsTaskExecutionInformation; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -39,9 +45,9 @@ import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.core.io.Resource; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Default implementation of the {@link DefaultTaskExecutionInfoService} interface. @@ -59,8 +65,10 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis */ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService { + private final static Logger logger = LoggerFactory.getLogger(DefaultTaskExecutionInfoService.class); private final DataSourceProperties dataSourceProperties; @@ -72,7 +80,7 @@ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService /** * Used to read TaskExecutions. 
*/ - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; private final TaskDefinitionRepository taskDefinitionRepository; @@ -87,48 +95,58 @@ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService /** * Initializes the {@link DefaultTaskExecutionInfoService}. * - * @param dataSourceProperties the data source properties. - * @param appRegistryService URI registry this service will use to look up app URIs. - * @param taskExplorer the explorer this service will use to lookup task executions - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will - * use for task CRUD operations. + * @param dataSourceProperties the data source properties. + * @param appRegistryService URI registry this service will use to look up app URIs. + * @param taskExplorer the explorer this service will use to lookup task executions + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will + * use for task CRUD operations. * @param taskConfigurationProperties the properties used to define the behavior of tasks - * @param launcherRepository the launcher repository - * @param taskPlatforms the task platforms + * @param launcherRepository the launcher repository + * @param taskPlatforms the task platforms */ @Deprecated - public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties, - AppRegistryService appRegistryService, - TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, - TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, - List taskPlatforms) { - this(dataSourceProperties, appRegistryService, taskExplorer, taskDefinitionRepository, - taskConfigurationProperties, launcherRepository, taskPlatforms, null); + public DefaultTaskExecutionInfoService( + DataSourceProperties dataSourceProperties, + AppRegistryService appRegistryService, + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, + TaskConfigurationProperties taskConfigurationProperties, + LauncherRepository launcherRepository, + List taskPlatforms + ) { + this(dataSourceProperties, + appRegistryService, + taskExplorer, + taskDefinitionRepository, + taskConfigurationProperties, + launcherRepository, + taskPlatforms, + null); } /** * Initializes the {@link DefaultTaskExecutionInfoService}. * - * @param dataSourceProperties the data source properties. - * @param appRegistryService URI registry this service will use to look up app URIs. - * @param taskExplorer the explorer this service will use to lookup task executions - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will - * use for task CRUD operations. - * @param taskConfigurationProperties the properties used to define the behavior of tasks - * @param launcherRepository the launcher repository - * @param taskPlatforms the task platforms + * @param dataSourceProperties the data source properties. + * @param appRegistryService URI registry this service will use to look up app URIs. + * @param taskExplorer the explorer this service will use to lookup task executions + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will + * use for task CRUD operations. 
+ * @param taskConfigurationProperties the properties used to define the behavior of tasks + * @param launcherRepository the launcher repository + * @param taskPlatforms the task platforms * @param composedTaskRunnerConfigurationProperties the properties used to define the behavior of CTR */ - public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties, - AppRegistryService appRegistryService, - TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, - TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, - List taskPlatforms, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + public DefaultTaskExecutionInfoService( + DataSourceProperties dataSourceProperties, + AppRegistryService appRegistryService, + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, + TaskConfigurationProperties taskConfigurationProperties, + LauncherRepository launcherRepository, + List taskPlatforms, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { Assert.notNull(dataSourceProperties, "DataSourceProperties must not be null"); Assert.notNull(appRegistryService, "AppRegistryService must not be null"); Assert.notNull(taskDefinitionRepository, "TaskDefinitionRepository must not be null"); @@ -148,8 +166,10 @@ public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties } @Override - public TaskExecutionInformation findTaskExecutionInformation(String taskName, - Map taskDeploymentProperties, boolean addDatabaseCredentials, Map previousTaskDeploymentProperties) { + public TaskExecutionInformation findTaskExecutionInformation( + String taskName, + Map taskDeploymentProperties, boolean addDatabaseCredentials, Map previousTaskDeploymentProperties + ) { Assert.hasText(taskName, "The provided taskName must not be null or empty."); Assert.notNull(taskDeploymentProperties, "The provided runtimeProperties must not be null."); @@ -157,11 +177,11 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, taskExecutionInformation.setTaskDeploymentProperties(taskDeploymentProperties); TaskDefinition originalTaskDefinition = taskDefinitionRepository.findById(taskName) - .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); + .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); //TODO: This normally called by JPA automatically but `AutoCreateTaskDefinitionTests` fails without this. originalTaskDefinition.initialize(); TaskParser taskParser = new TaskParser(originalTaskDefinition.getName(), originalTaskDefinition.getDslText(), - true, true); + true, true); TaskNode taskNode = taskParser.parse(); // if composed task definition replace definition with one composed task // runner and executable graph. 
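The hunk below is where DefaultTaskExecutionInfoService decides between a plain task and a composed one: a parsed TaskNode that reports isComposed() is re-targeted at the composed-task-runner app, with the executable graph as its definition. A minimal sketch of that detection, assuming the dataflow-core DSL classes imported elsewhere in this diff are on the classpath; the task names are illustrative:

    import org.springframework.cloud.dataflow.core.dsl.TaskNode;
    import org.springframework.cloud.dataflow.core.dsl.TaskParser;

    class ComposedTaskDetectionSketch {
        public static void main(String[] args) {
            // The && sequencing makes this DSL a composed definition, not a single app.
            TaskParser parser = new TaskParser("demo-ctr", "taskA && taskB", true, true);
            TaskNode node = parser.parse();
            if (node.isComposed()) {
                // The server swaps in composed-task-runner driving this executable graph.
                System.out.println("executable DSL: " + node.toExecutableDSL());
            } else {
                System.out.println("single task app: " + node.getTaskApp().getName());
            }
        }
    }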
@@ -169,34 +189,31 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, AppRegistration appRegistration; if (taskNode.isComposed()) { taskDefinitionToUse = new TaskDefinition(originalTaskDefinition.getName(), - TaskServiceUtils.createComposedTaskDefinition(taskNode.toExecutableDSL())); + TaskServiceUtils.createComposedTaskDefinition(taskNode.toExecutableDSL())); taskExecutionInformation.setTaskDeploymentProperties( - TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, - taskNode)); + TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, + taskNode)); taskDefinitionToUse = TaskServiceUtils.updateTaskProperties(taskDefinitionToUse, - dataSourceProperties, addDatabaseCredentials); + dataSourceProperties, addDatabaseCredentials); try { appRegistration = new AppRegistration(ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, - ApplicationType.task, - new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, - this.composedTaskRunnerConfigurationProperties))); - } - catch (URISyntaxException e) { + ApplicationType.task, + new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, + this.composedTaskRunnerConfigurationProperties))); + } catch (URISyntaxException e) { throw new IllegalStateException("Invalid Compose Task Runner Resource", e); } - } - else { + } else { taskDefinitionToUse = TaskServiceUtils.updateTaskProperties(originalTaskDefinition, - dataSourceProperties, addDatabaseCredentials); + dataSourceProperties, addDatabaseCredentials); String label = null; if (taskNode.getTaskApp() != null) { TaskAppNode taskAppNode = taskNode.getTaskApp(); if (taskAppNode.getLabel() != null) { label = taskAppNode.getLabel().stringValue(); - } - else { + } else { label = taskAppNode.getName(); } } @@ -208,11 +225,10 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, // if we have version, use that or rely on default version set if (version == null) { appRegistration = appRegistryService.find(taskDefinitionToUse.getRegisteredAppName(), - ApplicationType.task); - } - else { + ApplicationType.task); + } else { appRegistration = appRegistryService.find(taskDefinitionToUse.getRegisteredAppName(), - ApplicationType.task, version); + ApplicationType.task, version); } } @@ -226,6 +242,70 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, return taskExecutionInformation; } + @Override + public Set composedTaskChildNames(String taskName) { + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); + Set result = new HashSet<>(); + TaskNode taskNode = taskParser.parse(); + if (taskNode.isComposed()) { + extractNames(taskNode, result); + } + return result; + } + + @Override + public Set taskNames(String taskName) { + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); + Set result = new HashSet<>(); + TaskNode taskNode = taskParser.parse(); + extractNames(taskNode, result); + return result; + } + + private void extractNames(TaskNode taskNode, Set result) { + for (TaskApp subTask : taskNode.getTaskApps()) { + logger.debug("subTask:{}:{}:{}:{}", subTask.getName(), subTask.getTaskName(), subTask.getLabel(), subTask); + TaskDefinition 
subTaskDefinition = taskDefinitionRepository.findByTaskName(subTask.getName()); + if (subTaskDefinition != null) { + if(StringUtils.hasText(subTask.getLabel())) { + result.add(subTaskDefinition.getRegisteredAppName() + "," + subTask.getLabel()); + } else { + result.add(subTaskDefinition.getRegisteredAppName()); + } + TaskParser subTaskParser = new TaskParser(subTaskDefinition.getTaskName(), subTaskDefinition.getDslText(), true, true); + TaskNode subTaskNode = subTaskParser.parse(); + if (subTaskNode != null && subTaskNode.getTaskApp() != null) { + for (TaskApp subSubTask : subTaskNode.getTaskApps()) { + logger.debug("subSubTask:{}:{}:{}:{}", subSubTask.getName(), subSubTask.getTaskName(), subSubTask.getLabel(), subSubTask); + TaskDefinition subSubTaskDefinition = taskDefinitionRepository.findByTaskName(subSubTask.getName()); + if (subSubTaskDefinition != null) { + if (subSubTask.getLabel() != null && !subTask.getLabel().contains("$")) { + result.add(subSubTaskDefinition.getRegisteredAppName() + "," + subSubTask.getLabel()); + } else { + result.add(subSubTaskDefinition.getRegisteredAppName()); + } + } + } + } + } else { + if ((subTask.getLabel() == null || subTask.getLabel().equals(subTask.getName())) && !subTask.getName().contains("$")) { + result.add(subTask.getName()); + } else { + if (!subTask.getName().contains("$") && !subTask.getLabel().contains("$")) { + result.add(subTask.getName() + "," + subTask.getLabel()); + } else if (!subTask.getName().contains("$")) { + result.add(subTask.getName()); + } else if (!subTask.getTaskName().contains("$")) { + result.add(subTask.getTaskName()); + } + } + } + } + } + + @Override public List createTaskDeploymentRequests(String taskName, String dslText) { List appDeploymentRequests = new ArrayList<>(); TaskParser taskParser = new TaskParser(taskName, dslText, true, true); @@ -239,7 +319,7 @@ public List createTaskDeploymentRequests(String taskName, TaskNode subTaskNode = subTaskParser.parse(); String subTaskName = subTaskNode.getTaskApp().getName(); AppRegistration appRegistration = appRegistryService.find(subTaskName, - ApplicationType.task); + ApplicationType.task); Assert.notNull(appRegistration, "Unknown task app: " + subTask.getName()); Resource appResource = appRegistryService.getAppResource(appRegistration); @@ -248,12 +328,13 @@ public List createTaskDeploymentRequests(String taskName, AppDefinition appDefinition = new AppDefinition(subTask.getName(), subTaskNode.getTaskApp().getArgumentsAsMap()); AppDeploymentRequest appDeploymentRequest = new AppDeploymentRequest(appDefinition, - appResource, null, null); + appResource, null, null); appDeploymentRequests.add(appDeploymentRequest); } } return appDeploymentRequests; } + @Override public AllPlatformsTaskExecutionInformation findAllPlatformTaskExecutionInformation() { return new AllPlatformsTaskExecutionInformation(this.taskPlatforms); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java index 8b07be2e57..79c449cd63 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java @@ -16,6 +16,10 @@ 
package org.springframework.cloud.dataflow.server.service.impl; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.service.TaskExecutionCreationService; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; @@ -30,16 +34,29 @@ @Transactional public class DefaultTaskExecutionRepositoryService implements TaskExecutionCreationService { - private TaskRepository taskRepository; + private final TaskRepositoryContainer taskRepositoryContainer; + private final AggregateExecutionSupport aggregateExecutionSupport; - public DefaultTaskExecutionRepositoryService(TaskRepository taskRepository) { - Assert.notNull(taskRepository, "taskRepository must not be null"); - this.taskRepository = taskRepository; + private final TaskDefinitionReader taskDefinitionReader; + + public DefaultTaskExecutionRepositoryService( + TaskRepositoryContainer taskRepositoryContainer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader + ) { + Assert.notNull(taskRepositoryContainer, "taskRepository must not be null"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport must not be null"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader must not be null"); + this.taskRepositoryContainer = taskRepositoryContainer; + this.aggregateExecutionSupport = aggregateExecutionSupport; + this.taskDefinitionReader = taskDefinitionReader; } @Override @Transactional(propagation = Propagation.REQUIRES_NEW) - public TaskExecution createTaskExecution(String taskName) { + public TaskExecution createTaskExecution(String taskName, String version) { + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(taskName, version, taskDefinitionReader); + TaskRepository taskRepository = this.taskRepositoryContainer.get(schemaVersionTarget.getName()); return taskRepository.createTaskExecution(taskName); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java index 9ad1a41f07..55c34f82b9 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,7 +18,9 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -35,10 +37,15 @@ import org.slf4j.LoggerFactory; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; -import org.springframework.cloud.dataflow.core.Base64Utils; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskDeployment; @@ -49,11 +56,16 @@ import org.springframework.cloud.dataflow.core.dsl.visitor.ComposedTaskRunnerVisitor; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.job.LauncherRepository; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import org.springframework.cloud.dataflow.server.repository.TaskExecutionMissingExternalIdException; import org.springframework.cloud.dataflow.server.service.TaskExecutionCreationService; @@ -68,8 +80,8 @@ import org.springframework.cloud.task.listener.TaskException; import org.springframework.cloud.task.listener.TaskExecutionException; import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.Resource; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; @@ -92,6 +104,7 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis */ @Transactional public class DefaultTaskExecutionService implements TaskExecutionService { @@ -118,7 +131,7 @@ public class DefaultTaskExecutionService implements TaskExecutionService { /** * 
Used to create TaskExecutions. */ - private final TaskRepository taskRepository; + private final TaskRepositoryContainer taskRepositoryContainer; private final TaskExecutionInfoService taskExecutionInfoService; @@ -128,13 +141,17 @@ public class DefaultTaskExecutionService implements TaskExecutionService { private final TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator; - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; - private final DataflowTaskExecutionDao dataflowTaskExecutionDao; + private final DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer; - private final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; + private final DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer; - private OAuth2TokenUtilsService oauth2TokenUtilsService; + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + private final TaskDefinitionRepository taskDefinitionRepository; + + private final TaskDefinitionReader taskDefinitionReader; private final Map> tasksBeingUpgraded = new ConcurrentHashMap<>(); @@ -144,270 +161,363 @@ public class DefaultTaskExecutionService implements TaskExecutionService { private boolean autoCreateTaskDefinitions; - private TaskConfigurationProperties taskConfigurationProperties; + private final TaskConfigurationProperties taskConfigurationProperties; + + private final ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; - private ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + private final AggregateExecutionSupport aggregateExecutionSupport; + + private final DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao; + + private final PropertyResolver propertyResolver; private static final Pattern TASK_NAME_PATTERN = Pattern.compile("[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?"); + private static final String TASK_NAME_VALIDATION_MSG = "Task name must consist of alphanumeric characters " + - "or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', " + - "or 'abc-123')"; + "or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', " + + "or 'abc-123')"; /** * Initializes the {@link DefaultTaskExecutionService}. * - * @param launcherRepository the repository of task launcher used to launch task apps. - * @param auditRecordService the audit record service - * @param taskRepository the repository to use for accessing and updating task executions - * @param taskExecutionInfoService the task execution info service - * @param taskDeploymentRepository the repository to track task deployment - * @param taskExecutionInfoService the service used to setup a task execution - * @param taskExecutionRepositoryService the service used to create the task execution - * @param taskAppDeploymentRequestCreator the task app deployment request creator - * @param taskExplorer the task explorer - * @param dataflowTaskExecutionDao the dataflow task execution dao - * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests - * @param oauth2TokenUtilsService the oauth2 token server - * @param taskSaveService the task save service + * @param propertyResolver the spring application context + * @param launcherRepository the repository of task launcher used to launch task apps. 
+ * @param auditRecordService the audit record service + * @param taskRepositoryContainer the container of repositories to use for accessing and updating task executions + * @param taskExecutionInfoService the service used to setup a task execution + * @param taskDeploymentRepository the repository to track task deployment + * @param taskDefinitionRepository the repository to query the task definition + * @param taskDefinitionReader use task definition repository to retrieve definition + * @param taskExecutionRepositoryService the service used to create the task execution + * @param taskAppDeploymentRequestCreator the task app deployment request creator + * @param taskExplorer the task explorer + * @param dataflowTaskExecutionDaoContainer the dataflow task execution dao + * @param dataflowTaskExecutionMetadataDaoContainer repository used to manipulate task manifests + * @param dataflowTaskExecutionQueryDao repository to query aggregate TaskExecution data + * @param oauth2TokenUtilsService the oauth2 token server + * @param taskSaveService the task save service + * @param taskConfigurationProperties task configuration properties. + * @param aggregateExecutionSupport support for selecting SchemaVersionTarget */ @Deprecated - public DefaultTaskExecutionService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService, - TaskConfigurationProperties taskConfigurationProperties) { - this(launcherRepository, auditRecordService, taskRepository, taskExecutionInfoService, taskDeploymentRepository, - taskExecutionRepositoryService, taskAppDeploymentRequestCreator, taskExplorer, dataflowTaskExecutionDao, - dataflowTaskExecutionMetadataDao, oauth2TokenUtilsService, taskSaveService, taskConfigurationProperties, - null); + public DefaultTaskExecutionService( + PropertyResolver propertyResolver, + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepositoryContainer taskRepositoryContainer, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + AggregateTaskExplorer taskExplorer, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService, + TaskConfigurationProperties taskConfigurationProperties, + AggregateExecutionSupport aggregateExecutionSupport + ) { + this(propertyResolver, + launcherRepository, + auditRecordService, + taskRepositoryContainer, + taskExecutionInfoService, + taskDeploymentRepository, + taskDefinitionRepository, + taskDefinitionReader, + taskExecutionRepositoryService, + taskAppDeploymentRequestCreator, + 
taskExplorer, + dataflowTaskExecutionDaoContainer, + dataflowTaskExecutionMetadataDaoContainer, + dataflowTaskExecutionQueryDao, + oauth2TokenUtilsService, + taskSaveService, + taskConfigurationProperties, + aggregateExecutionSupport, + null); } /** * Initializes the {@link DefaultTaskExecutionService}. * - * @param launcherRepository the repository of task launcher used to launch task apps. - * @param auditRecordService the audit record service - * @param taskRepository the repository to use for accessing and updating task executions - * @param taskExecutionInfoService the task execution info service - * @param taskDeploymentRepository the repository to track task deployment - * @param taskExecutionInfoService the service used to setup a task execution - * @param taskExecutionRepositoryService the service used to create the task execution - * @param taskAppDeploymentRequestCreator the task app deployment request creator - * @param taskExplorer the task explorer - * @param dataflowTaskExecutionDao the dataflow task execution dao - * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests - * @param oauth2TokenUtilsService the oauth2 token server - * @param taskSaveService the task save service + * @param propertyResolver the spring application context + * @param launcherRepository the repository of task launcher used to launch task apps. + * @param auditRecordService the audit record service + * @param taskRepositoryContainer the container of repositories to use for accessing and updating task executions + * @param taskExecutionInfoService the task execution info service + * @param taskDeploymentRepository the repository to track task deployment + * @param taskDefinitionRepository the repository to query the task definition + * @param taskDefinitionReader uses task definition repository to retrieve definition + * @param taskExecutionRepositoryService the service used to create the task execution + * @param taskAppDeploymentRequestCreator the task app deployment request creator + * @param taskExplorer the task explorer + * @param dataflowTaskExecutionDaoContainer the dataflow task execution dao + * @param dataflowTaskExecutionMetadataDaoContainer repository used to manipulate task manifests + * @param dataflowTaskExecutionQueryDao repository to query aggregate task execution data. + * @param oauth2TokenUtilsService the oauth2 token server + * @param taskSaveService the task save service + * @param taskConfigurationProperties task configuration properties + * @param aggregateExecutionSupport support for selecting SchemaVersionTarget. 
* @param composedTaskRunnerConfigurationProperties properties used to configure the composed task runner */ - public DefaultTaskExecutionService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService, - TaskConfigurationProperties taskConfigurationProperties, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + public DefaultTaskExecutionService( + PropertyResolver propertyResolver, + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepositoryContainer taskRepositoryContainer, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + AggregateTaskExplorer taskExplorer, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService, + TaskConfigurationProperties taskConfigurationProperties, + AggregateExecutionSupport aggregateExecutionSupport, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { + Assert.notNull(propertyResolver, "propertyResolver must not be null"); Assert.notNull(launcherRepository, "launcherRepository must not be null"); Assert.notNull(auditRecordService, "auditRecordService must not be null"); Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); - Assert.notNull(taskRepository, "taskRepository must not be null"); + Assert.notNull(taskRepositoryContainer, "taskRepositoryContainer must not be null"); Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); Assert.notNull(taskDeploymentRepository, "taskDeploymentRepository must not be null"); Assert.notNull(taskExecutionRepositoryService, "taskExecutionRepositoryService must not be null"); Assert.notNull(taskAppDeploymentRequestCreator, "taskAppDeploymentRequestCreator must not be null"); Assert.notNull(taskExplorer, "taskExplorer must not be null"); - Assert.notNull(dataflowTaskExecutionDao, "dataflowTaskExecutionDao must not be null"); - Assert.notNull(dataflowTaskExecutionMetadataDao, "dataflowTaskExecutionMetadataDao must not be null"); + Assert.notNull(dataflowTaskExecutionDaoContainer, "dataflowTaskExecutionDaoContainer must not be null"); + Assert.notNull(dataflowTaskExecutionMetadataDaoContainer, "dataflowTaskExecutionMetadataDaoContainer must not be null"); Assert.notNull(taskSaveService, "taskSaveService must not be null"); Assert.notNull(taskConfigurationProperties, "taskConfigurationProperties must not be null"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport must not be null"); + 
Assert.notNull(taskDefinitionRepository, "taskDefinitionRepository must not be null"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader must not be null"); + this.propertyResolver = propertyResolver; this.oauth2TokenUtilsService = oauth2TokenUtilsService; this.launcherRepository = launcherRepository; this.auditRecordService = auditRecordService; - this.taskRepository = taskRepository; + this.taskRepositoryContainer = taskRepositoryContainer; this.taskExecutionInfoService = taskExecutionInfoService; this.taskDeploymentRepository = taskDeploymentRepository; + this.taskDefinitionRepository = taskDefinitionRepository; + this.taskDefinitionReader = taskDefinitionReader; this.taskExecutionRepositoryService = taskExecutionRepositoryService; this.taskAppDeploymentRequestCreator = taskAppDeploymentRequestCreator; this.taskExplorer = taskExplorer; - this.dataflowTaskExecutionDao = dataflowTaskExecutionDao; - this.dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDao; + this.dataflowTaskExecutionDaoContainer = dataflowTaskExecutionDaoContainer; + this.dataflowTaskExecutionMetadataDaoContainer = dataflowTaskExecutionMetadataDaoContainer; this.taskSaveService = taskSaveService; this.taskConfigurationProperties = taskConfigurationProperties; + this.aggregateExecutionSupport = aggregateExecutionSupport; this.composedTaskRunnerConfigurationProperties = composedTaskRunnerConfigurationProperties; + this.dataflowTaskExecutionQueryDao = dataflowTaskExecutionQueryDao; + } /** * Launch a task. - * @param taskName Name of the task definition or registered task application. - * If a task definition does not exist, one will be created if `autoCreateTask-Definitions` is true. Must not be null or empty. + * + * @param taskName Name of the task definition or registered task application. + * If a task definition does not exist, one will be created if `autoCreateTask-Definitions` is true. Must not be null or empty. * @param taskDeploymentProperties Optional deployment properties. Must not be null. - * @param commandLineArgs Optional runtime commandline argument + * @param commandLineArgs Optional runtime commandline argument * @return the task execution ID. */ @Override - public long executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs) { + public LaunchResponse executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs) { // Get platform name and fallback to 'default' String platformName = getPlatform(taskDeploymentProperties); String platformType = StreamSupport.stream(launcherRepository.findAll().spliterator(), true) - .filter(deployer -> deployer.getName().equalsIgnoreCase(platformName)) - .map(Launcher::getType) - .findFirst() - .orElse("unknown"); + .filter(deployer -> deployer.getName().equalsIgnoreCase(platformName)) + .map(Launcher::getType) + .findFirst() + .orElse("unknown"); if (platformType.equals(TaskPlatformFactory.KUBERNETES_PLATFORM_TYPE) && !TASK_NAME_PATTERN.matcher(taskName).matches()) { throw new TaskException(String.format("Task name %s is invalid. 
%s", taskName, TASK_NAME_VALIDATION_MSG)); } // Naive local state to prevent parallel launches to break things up - if(this.tasksBeingUpgraded.containsKey(taskName)) { + if (this.tasksBeingUpgraded.containsKey(taskName)) { List platforms = this.tasksBeingUpgraded.get(taskName); - if(platforms.contains(platformName)) { + if (platforms.contains(platformName)) { throw new IllegalStateException(String.format( - "Unable to launch %s on platform %s because it is being upgraded", taskName, platformName)); + "Unable to launch %s on platform %s because it is being upgraded", taskName, platformName)); } } Launcher launcher = this.launcherRepository.findByName(platformName); - if(launcher == null) { + if (launcher == null) { throw new IllegalStateException(String.format("No launcher was available for platform %s", platformName)); } validateTaskName(taskName, launcher); // Remove since the key for task platform name will not pass validation for app, // deployer, or scheduler prefix. // Then validate - if (taskDeploymentProperties.containsKey(TASK_PLATFORM_NAME)) { - taskDeploymentProperties.remove(TASK_PLATFORM_NAME); - } - DeploymentPropertiesUtils.validateDeploymentProperties(taskDeploymentProperties); - + Map deploymentProperties = new HashMap<>(taskDeploymentProperties); + deploymentProperties.remove(TASK_PLATFORM_NAME); + DeploymentPropertiesUtils.validateDeploymentProperties(deploymentProperties); TaskDeployment existingTaskDeployment = taskDeploymentRepository - .findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName); + .findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName); if (existingTaskDeployment != null) { if (!existingTaskDeployment.getPlatformName().equals(platformName)) { throw new IllegalStateException(String.format( - "Task definition [%s] has already been deployed on platform [%s]. " + - "Requested to deploy on platform [%s].", - taskName, existingTaskDeployment.getPlatformName(), platformName)); + "Task definition [%s] has already been deployed on platform [%s]. " + + "Requested to deploy on platform [%s].", + taskName, existingTaskDeployment.getPlatformName(), platformName)); } } + List commandLineArguments = new ArrayList<>(commandLineArgs); + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + + String taskAppName = taskDefinition != null ? taskDefinition.getRegisteredAppName() : taskName; + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskAppName, taskDefinition); + Assert.notNull(schemaVersionTarget, "schemaVersionTarget not found for " + taskAppName); + + DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName()); // Get the previous manifest - TaskManifest previousManifest = this.dataflowTaskExecutionMetadataDao.getLatestManifest(taskName); + TaskManifest previousManifest = dataflowTaskExecutionMetadataDao.getLatestManifest(taskName); Map previousTaskDeploymentProperties = previousManifest != null - && previousManifest.getTaskDeploymentRequest() != null - && previousManifest.getTaskDeploymentRequest().getDeploymentProperties() != null - ? previousManifest.getTaskDeploymentRequest().getDeploymentProperties() - : Collections.emptyMap(); + && previousManifest.getTaskDeploymentRequest() != null + && previousManifest.getTaskDeploymentRequest().getDeploymentProperties() != null + ? 
previousManifest.getTaskDeploymentRequest().getDeploymentProperties() + : Collections.emptyMap(); TaskExecutionInformation taskExecutionInformation = findOrCreateTaskExecutionInformation(taskName, - taskDeploymentProperties, launcher.getType(), previousTaskDeploymentProperties); + deploymentProperties, launcher.getType(), previousTaskDeploymentProperties); - // pre prosess command-line args - // moving things like app.

The input args are copied and entries that begin with {@code 'app.'} + * are replaced with a {@code 'composed-task-app-arguments.'} + * prefixed entry. The transformed arg will also be converted to Base64 + * if necessary (e.g. when it has an {@code =} sign in the value). + * + * @param commandLineArgs The command line arguments to be converted + * @return list of converted command line arguments + */ + static List<String> convertCommandLineArgsToCTRFormat(List<String> commandLineArgs) { + List<String> composedTaskArguments = new ArrayList<>(); + commandLineArgs.forEach(arg -> { + if (arg == null) { + throw new IllegalArgumentException("Command line Arguments for ComposedTaskRunner contain a null entry."); + } + if (arg.startsWith("app.") || arg.startsWith("--app.")) { + if (arg.startsWith("--")) { + arg = arg.substring(2); + } + String[] split = arg.split("=", 2); + // TODO convert key portion of property / argument to spring commandline format. + if (split.length == 2) { + composedTaskArguments.add("--composed-task-app-arguments." + Base64Utils.encode(split[0]) + "=" + split[1]); + } + else { + composedTaskArguments.add("--composed-task-app-arguments." + arg); + } + } + else { + composedTaskArguments.add(arg); + } + }); + return composedTaskArguments; + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/TaskServicesDateUtils.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/TaskServicesDateUtils.java new file mode 100644 index 0000000000..ae88a243e1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/TaskServicesDateUtils.java @@ -0,0 +1,47 @@ +/* + * Copyright 2016-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.service.impl; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.util.Date; + +import org.springframework.lang.NonNull; + +/** + * Provides date functionality for the task services. + * + * @author Tobias Soloschenko + */ +final class TaskServicesDateUtils { + + private TaskServicesDateUtils() { + } + + /** + * Gets the date representation for the given number of days in the past.
+ * + * @param numDaysAgo the number of days ago + * @return the date for {@code numDaysAgo} from today at midnight (locally) + */ + public static Date numDaysAgoFromLocalMidnightToday(@NonNull Integer numDaysAgo) { + LocalDateTime localDateTime = LocalDateTime.of(LocalDate.now(), LocalTime.MIDNIGHT).minusDays(numDaysAgo); + return Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant()); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java index bac939c13b..28c37687e3 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java @@ -41,6 +41,8 @@ import org.slf4j.LoggerFactory; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; +import org.yaml.snakeyaml.representer.Representer; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; @@ -48,9 +50,11 @@ import org.springframework.cloud.dataflow.core.StreamDefinition; import org.springframework.cloud.dataflow.core.StreamDefinitionService; import org.springframework.cloud.dataflow.core.StreamDeployment; +import org.springframework.cloud.dataflow.core.StreamRuntimePropertyKeys; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.dataflow.rest.SkipperStream; import org.springframework.cloud.dataflow.server.controller.NoSuchAppException; +import org.springframework.cloud.dataflow.server.controller.NoSuchAppInstanceException; import org.springframework.cloud.dataflow.server.controller.support.InvalidStreamDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchStreamDefinitionException; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; @@ -63,6 +67,7 @@ import org.springframework.cloud.skipper.SkipperException; import org.springframework.cloud.skipper.client.SkipperClient; import org.springframework.cloud.skipper.domain.AboutResource; +import org.springframework.cloud.skipper.domain.ActuatorPostRequest; import org.springframework.cloud.skipper.domain.ConfigValues; import org.springframework.cloud.skipper.domain.Deployer; import org.springframework.cloud.skipper.domain.Info; @@ -100,578 +105,602 @@ * @author Soby Chacko * @author Glenn Renfro * @author Christian Tzolov + * @author Chris Bono */ public class SkipperStreamDeployer implements StreamDeployer { - private static final Logger logger = LoggerFactory.getLogger(SkipperStreamDeployer.class); - - //Assume version suffix added by skipper is 5 chars. 
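Stepping back to `convertCommandLineArgsToCTRFormat` from the DefaultTaskExecutionService hunk above, here is a hypothetical usage sketch of that package-private helper. It lives in the same package because of the helper's visibility; the input values are invented, and the exact rewritten token depends on what `Base64Utils.encode` produces:

```java
package org.springframework.cloud.dataflow.server.service.impl;

import java.util.Arrays;
import java.util.List;

// Illustrative only, e.g. inside a test in this package.
class CtrArgsSketch {
	public static void main(String[] args) {
		List<String> in = Arrays.asList(
				"--app.timestamp.format=YYYY",        // rewritten for the CTR
				"--increment-instance-enabled=true"); // passed through unchanged
		List<String> out = DefaultTaskExecutionService.convertCommandLineArgsToCTRFormat(in);
		// Expected shape of the first entry:
		//   --composed-task-app-arguments.<base64("app.timestamp.format")>=YYYY
		out.forEach(System.out::println);
	}
}
```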
- private static final int MAX_APPNAME_LENGTH = 63-5; - - private final SkipperClient skipperClient; - - private final StreamDefinitionRepository streamDefinitionRepository; - - private final AppRegistryService appRegistryService; - - private final ForkJoinPool forkJoinPool; - - private final StreamDefinitionService streamDefinitionService; - - public SkipperStreamDeployer(SkipperClient skipperClient, StreamDefinitionRepository streamDefinitionRepository, - AppRegistryService appRegistryService, ForkJoinPool forkJoinPool, - StreamDefinitionService streamDefinitionService) { - Assert.notNull(skipperClient, "SkipperClient can not be null"); - Assert.notNull(streamDefinitionRepository, "StreamDefinitionRepository can not be null"); - Assert.notNull(appRegistryService, "StreamDefinitionRepository can not be null"); - Assert.notNull(forkJoinPool, "ForkJoinPool can not be null"); - Assert.notNull(streamDefinitionService, "StreamDefinitionService can not be null"); - this.skipperClient = skipperClient; - this.streamDefinitionRepository = streamDefinitionRepository; - this.appRegistryService = appRegistryService; - this.forkJoinPool = forkJoinPool; - this.streamDefinitionService = streamDefinitionService; - } - - public static List deserializeAppStatus(String platformStatus) { - try { - if (platformStatus != null) { - ObjectMapper mapper = new ObjectMapper(); - mapper.addMixIn(AppStatus.class, AppStatusMixin.class); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - SimpleModule module = new SimpleModule("CustomModel", Version.unknownVersion()); - SimpleAbstractTypeResolver resolver = new SimpleAbstractTypeResolver(); - resolver.addMapping(AppInstanceStatus.class, AppInstanceStatusImpl.class); - module.setAbstractTypes(resolver); - mapper.registerModule(module); - TypeReference> typeRef = new TypeReference>() { - }; - return mapper.readValue(platformStatus, typeRef); - } - return new ArrayList<>(); - } - catch (Exception e) { - logger.error("Could not parse Skipper Platform Status JSON [" + platformStatus + "]. 
" + - "Exception message = " + e.getMessage()); - return new ArrayList<>(); - } - } - - @Override - public DeploymentState streamState(String streamName) { - return getStreamDeploymentState(streamName); - } - - @Override - public Map streamsStates(List streamDefinitions) { - Map nameToDefinition = new HashMap<>(); - Map states = new HashMap<>(); - List streamNamesList = new ArrayList<>(); - streamDefinitions.stream().forEach(sd -> { - streamNamesList.add(sd.getName()); - nameToDefinition.put(sd.getName(), sd); - }); - String[] streamNames = streamNamesList.toArray(new String[0]); - Map> statuses = this.skipperClient.states(streamNames); - for (Map.Entry entry: nameToDefinition.entrySet()) { - String streamName = entry.getKey(); - if (statuses != null && statuses.containsKey(streamName) && !statuses.get(streamName).isEmpty()) { - states.put(nameToDefinition.get(streamName), - StreamDeployerUtil.aggregateState(new HashSet<>(statuses.get(streamName).values()))); - } - else { - states.put(nameToDefinition.get(streamName), DeploymentState.undeployed); - } - } - return states; - } - - private DeploymentState getStreamDeploymentState(String streamName) { - DeploymentState state = null; - try { - Info info = this.skipperClient.status(streamName); - if (info.getStatus().getPlatformStatus() == null) { - return getDeploymentStateFromStatusInfo(info); - } - List appStatusList = deserializeAppStatus(info.getStatus().getPlatformStatus()); - Set deploymentStateList = appStatusList.stream().map(AppStatus::getState) - .collect(Collectors.toSet()); - state = StreamDeployerUtil.aggregateState(deploymentStateList); - } - catch (ReleaseNotFoundException e) { - // a defined stream but unknown to skipper is considered to be in an undeployed state - if (streamDefinitionExists(streamName)) { - state = DeploymentState.undeployed; - } - } - return state; - } - - private DeploymentState getDeploymentStateFromStatusInfo(Info info) { - DeploymentState result = DeploymentState.unknown; - switch (info.getStatus().getStatusCode()) { - case FAILED: - result = DeploymentState.failed; - break; - case DELETED: - result = DeploymentState.undeployed; - break; - case DEPLOYED: - result = DeploymentState.deployed; - } - return result; - } - - private boolean streamDefinitionExists(String streamName) { - return this.streamDefinitionRepository.findById(streamName).isPresent(); - } - - @Override - public void scale(String streamName, String appName, int count, Map properties) { - this.skipperClient.scale(streamName, ScaleRequest.of(appName, count, properties)); - } - - public Release deployStream(StreamDeploymentRequest streamDeploymentRequest) { - validateStreamDeploymentRequest(streamDeploymentRequest); - Map streamDeployerProperties = streamDeploymentRequest.getStreamDeployerProperties(); - String packageVersion = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_VERSION); - Assert.isTrue(StringUtils.hasText(packageVersion), "Package Version must be set"); - logger.info("Deploying Stream " + streamDeploymentRequest.getStreamName() + " using skipper."); - String repoName = streamDeployerProperties.get(SkipperStream.SKIPPER_REPO_NAME); - repoName = (StringUtils.hasText(repoName)) ? (repoName) : "local"; - String platformName = streamDeployerProperties.get(SkipperStream.SKIPPER_PLATFORM_NAME); - platformName = determinePlatformName(platformName); - String packageName = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_NAME); - packageName = (StringUtils.hasText(packageName)) ? 
packageName : streamDeploymentRequest.getStreamName(); - // Create the package .zip file to upload - File packageFile = createPackageForStream(packageName, packageVersion, streamDeploymentRequest); - // Upload the package - UploadRequest uploadRequest = new UploadRequest(); - uploadRequest.setName(packageName); - uploadRequest.setVersion(packageVersion); - uploadRequest.setExtension("zip"); - uploadRequest.setRepoName(repoName); // TODO use from skipperDeploymentProperties if set. - try { - uploadRequest.setPackageFileAsBytes(Files.readAllBytes(packageFile.toPath())); - } - catch (IOException e) { - throw new IllegalArgumentException("Can't read packageFile " + packageFile, e); - } - skipperClient.upload(uploadRequest); - // Install the package - String streamName = streamDeploymentRequest.getStreamName(); - InstallRequest installRequest = new InstallRequest(); - PackageIdentifier packageIdentifier = new PackageIdentifier(); - packageIdentifier.setPackageName(packageName); - packageIdentifier.setPackageVersion(packageVersion); - packageIdentifier.setRepositoryName(repoName); - installRequest.setPackageIdentifier(packageIdentifier); - InstallProperties installProperties = new InstallProperties(); - installProperties.setPlatformName(platformName); - installProperties.setReleaseName(streamName); - installProperties.setConfigValues(new ConfigValues()); - installRequest.setInstallProperties(installProperties); - Release release = null; - try { - release = this.skipperClient.install(installRequest); - } - catch (Exception e) { - logger.error("Skipper install failed. Deleting the package: " + packageName); - try { - this.skipperClient.packageDelete(packageName); - } - catch (Exception e1) { - logger.error("Package delete threw exception: " + e1.getMessage()); - } - throw new SkipperException(e.getMessage()); - } - // TODO store releasename in deploymentIdRepository... - return release; - } - - private String determinePlatformName(final String platformName) { - Collection deployers = skipperClient.listDeployers(); - if (StringUtils.hasText(platformName)) { - List filteredDeployers = deployers.stream() - .filter(d -> d.getName().equals(platformName)) - .collect(Collectors.toList()); - if (filteredDeployers.size() == 0) { - throw new IllegalArgumentException("No platform named '" + platformName + "'"); - } - else { - return platformName; - } - } - else { - if (deployers.size() == 0) { - throw new IllegalArgumentException("No platforms configured"); - } - else { - String platformNameToUse = deployers.stream().findFirst().get().getName(); - logger.info("Using platform '" + platformNameToUse + "'"); - return platformNameToUse; - } - } - } - - private void validateStreamDeploymentRequest(StreamDeploymentRequest streamDeploymentRequest) { - if (streamDeploymentRequest.getAppDeploymentRequests() == null - || streamDeploymentRequest.getAppDeploymentRequests().isEmpty()) { - // nothing to validate. 
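One aside before the removed block resumes below: the `deserializeAppStatus` method shown earlier (and carried over verbatim into the re-indented copy later in this diff) relies on a Jackson idiom worth calling out, binding JSON to an interface by registering an abstract-type mapping plus a mixin. A stripped-down sketch of the same idiom, with invented `Pet`/`Dog` types standing in for `AppInstanceStatus`/`AppInstanceStatusImpl`:

```java
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleAbstractTypeResolver;
import com.fasterxml.jackson.databind.module.SimpleModule;

interface Pet { String getName(); }

class Dog implements Pet {
	private String name;
	public String getName() { return name; }
	public void setName(String name) { this.name = name; }
}

class MixinSketch {
	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		SimpleModule module = new SimpleModule("CustomModel", Version.unknownVersion());
		SimpleAbstractTypeResolver resolver = new SimpleAbstractTypeResolver();
		// Tell Jackson which concrete class to instantiate for the interface,
		// exactly as deserializeAppStatus does for AppInstanceStatus.
		resolver.addMapping(Pet.class, Dog.class);
		module.setAbstractTypes(resolver);
		mapper.registerModule(module);
		Pet pet = mapper.readValue("{\"name\":\"rex\"}", Pet.class);
		System.out.println(pet.getName()); // rex
	}
}
```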
- return; - } - String streamName = streamDeploymentRequest.getStreamName(); - // throw as at this point we should have definition - StreamDefinition streamDefinition = this.streamDefinitionRepository - .findById(streamName) - .orElseThrow(() -> new NoSuchStreamDefinitionException(streamDeploymentRequest.getStreamName())); - - for (AppDeploymentRequest adr : streamDeploymentRequest.getAppDeploymentRequests()) { - String registeredAppName = getRegisteredName(streamDefinition, adr.getDefinition().getName()); - String appName = String.format("%s-%s-v", streamName, registeredAppName); - if (appName.length() > 40) { - logger.warn("The stream name plus application name [" + appName + "] is longer than 40 characters." + - " This can not exceed " + MAX_APPNAME_LENGTH + " in length."); - } - if (appName.length() > MAX_APPNAME_LENGTH) { - throw new InvalidStreamDefinitionException( - String.format("The runtime application name for the app %s in the stream %s " - + "should not exceed %s in length. The runtime application name is: %s", registeredAppName, streamName, MAX_APPNAME_LENGTH, appName)); - } - String version = this.appRegistryService.getResourceVersion(adr.getResource()); - validateAppVersionIsRegistered(registeredAppName, adr, version); - } - } - - private String getRegisteredName(StreamDefinition streamDefinition, String adrAppName) { - for (StreamAppDefinition appDefinition : this.streamDefinitionService.getAppDefinitions(streamDefinition)) { - if (appDefinition.getName().equals(adrAppName)) { - return appDefinition.getRegisteredAppName(); - } - } - return adrAppName; - } - - public void validateAppVersionIsRegistered(StreamDefinition streamDefinition, AppDeploymentRequest appDeploymentRequest, String appVersion) { - String registeredAppName = getRegisteredName(streamDefinition, appDeploymentRequest.getDefinition().getName()); - this.validateAppVersionIsRegistered(registeredAppName, appDeploymentRequest, appVersion); - } - - private void validateAppVersionIsRegistered(String registeredAppName, AppDeploymentRequest appDeploymentRequest, String appVersion) { - String appTypeString = appDeploymentRequest.getDefinition().getProperties() - .get(DataFlowPropertyKeys.STREAM_APP_TYPE); - ApplicationType applicationType = ApplicationType.valueOf(appTypeString); - if (!this.appRegistryService.appExist(registeredAppName, applicationType, appVersion)) { - throw new IllegalStateException(String.format("The %s:%s:%s app is not registered!", - registeredAppName, appTypeString, appVersion)); - } - } - - private File createPackageForStream(String packageName, String packageVersion, - StreamDeploymentRequest streamDeploymentRequest) { - PackageWriter packageWriter = new DefaultPackageWriter(); - Package pkgtoWrite = createPackage(packageName, packageVersion, streamDeploymentRequest); - Path tempPath; - try { - tempPath = Files.createTempDirectory("streampackages"); - } - catch (IOException e) { - throw new IllegalArgumentException("Can't create temp diroectory"); - } - File outputDirectory = tempPath.toFile(); - - File zipFile = packageWriter.write(pkgtoWrite, outputDirectory); - return zipFile; - } - - private Package createPackage(String packageName, String packageVersion, - StreamDeploymentRequest streamDeploymentRequest) { - Package pkg = new Package(); - PackageMetadata packageMetadata = new PackageMetadata(); - packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION); - packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND); - packageMetadata.setName(packageName); - 
packageMetadata.setVersion(packageVersion); - packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER); - packageMetadata.setDescription(streamDeploymentRequest.getDslText()); - pkg.setMetadata(packageMetadata); - pkg.setDependencies(createDependentPackages(packageVersion, streamDeploymentRequest)); - return pkg; - } - - private List createDependentPackages(String packageVersion, - StreamDeploymentRequest streamDeploymentRequest) { - List packageList = new ArrayList<>(); - for (AppDeploymentRequest appDeploymentRequest : streamDeploymentRequest.getAppDeploymentRequests()) { - packageList.add(createDependentPackage(packageVersion, appDeploymentRequest)); - } - return packageList; - } - - private Package createDependentPackage(String packageVersion, AppDeploymentRequest appDeploymentRequest) { - Package pkg = new Package(); - String packageName = appDeploymentRequest.getDefinition().getName(); - - PackageMetadata packageMetadata = new PackageMetadata(); - packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION); - packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND); - packageMetadata.setName(packageName); - packageMetadata.setVersion(packageVersion); - packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER); - - pkg.setMetadata(packageMetadata); - - ConfigValues configValues = new ConfigValues(); - Map configValueMap = new HashMap<>(); - Map metadataMap = new HashMap<>(); - Map specMap = new HashMap<>(); - - // Add metadata - metadataMap.put("name", packageName); - - // Add spec - String resourceWithoutVersion = this.appRegistryService.getResourceWithoutVersion(appDeploymentRequest.getResource()); - specMap.put("resource", resourceWithoutVersion); - specMap.put("applicationProperties", appDeploymentRequest.getDefinition().getProperties()); - specMap.put("deploymentProperties", appDeploymentRequest.getDeploymentProperties()); - String version = this.appRegistryService.getResourceVersion(appDeploymentRequest.getResource()); - // Add version, including possible override via deploymentProperties - hack to store version in cmdline args - if (appDeploymentRequest.getCommandlineArguments().size() == 1) { - specMap.put("version", appDeploymentRequest.getCommandlineArguments().get(0)); - } - else { - specMap.put("version", version); - } - // Add metadata and spec to top level map - configValueMap.put("metadata", metadataMap); - configValueMap.put("spec", specMap); + private static final Logger logger = LoggerFactory.getLogger(SkipperStreamDeployer.class); + + //Assume version suffix added by skipper is 5 chars. 
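On the `MAX_APPNAME_LENGTH` constant that the comment above introduces (the hunk continues right after this note): the 63 comes from the DNS-1123 label limit that Kubernetes enforces on resource names, and the 5 is the assumed worst case for the version suffix Skipper appends (e.g. `-v999`). A tiny worked check of the headroom this budget leaves a runtime app name; the stream and app names are invented:

```java
class AppNameBudgetSketch {
	// Mirrors the constant in the hunk: a 63-char DNS-1123 label minus an
	// assumed 5-char Skipper version suffix.
	static final int MAX_APPNAME_LENGTH = 63 - 5;

	public static void main(String[] args) {
		String streamName = "ticktock";    // illustrative
		String registeredAppName = "time"; // illustrative
		// Same "%s-%s-v" shape as validateStreamDeploymentRequest builds.
		String runtimeName = String.format("%s-%s-v", streamName, registeredAppName);
		System.out.printf("%s uses %d of %d allowed chars%n",
				runtimeName, runtimeName.length(), MAX_APPNAME_LENGTH);
	}
}
```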
+ private static final int MAX_APPNAME_LENGTH = 63 - 5; + + private final SkipperClient skipperClient; + + private final StreamDefinitionRepository streamDefinitionRepository; + + private final AppRegistryService appRegistryService; + + private final ForkJoinPool forkJoinPool; + + private final StreamDefinitionService streamDefinitionService; + + public SkipperStreamDeployer(SkipperClient skipperClient, StreamDefinitionRepository streamDefinitionRepository, + AppRegistryService appRegistryService, ForkJoinPool forkJoinPool, + StreamDefinitionService streamDefinitionService) { + Assert.notNull(skipperClient, "SkipperClient can not be null"); + Assert.notNull(streamDefinitionRepository, "StreamDefinitionRepository can not be null"); + Assert.notNull(appRegistryService, "StreamDefinitionRepository can not be null"); + Assert.notNull(forkJoinPool, "ForkJoinPool can not be null"); + Assert.notNull(streamDefinitionService, "StreamDefinitionService can not be null"); + this.skipperClient = skipperClient; + this.streamDefinitionRepository = streamDefinitionRepository; + this.appRegistryService = appRegistryService; + this.forkJoinPool = forkJoinPool; + this.streamDefinitionService = streamDefinitionService; + } + + public static List deserializeAppStatus(String platformStatus) { + try { + if (platformStatus != null) { + ObjectMapper mapper = new ObjectMapper(); + mapper.addMixIn(AppStatus.class, AppStatusMixin.class); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + SimpleModule module = new SimpleModule("CustomModel", Version.unknownVersion()); + SimpleAbstractTypeResolver resolver = new SimpleAbstractTypeResolver(); + resolver.addMapping(AppInstanceStatus.class, AppInstanceStatusImpl.class); + module.setAbstractTypes(resolver); + mapper.registerModule(module); + TypeReference> typeRef = new TypeReference>() { + }; + return mapper.readValue(platformStatus, typeRef); + } + return new ArrayList<>(); + } catch (Exception e) { + logger.error("Could not parse Skipper Platform Status JSON [" + platformStatus + "]. 
" + + "Exception message = " + e.getMessage()); + return new ArrayList<>(); + } + } + + @Override + public DeploymentState streamState(String streamName) { + return getStreamDeploymentState(streamName); + } + + @Override + public Map streamsStates(List streamDefinitions) { + Map nameToDefinition = new HashMap<>(); + Map states = new HashMap<>(); + List streamNamesList = new ArrayList<>(); + streamDefinitions.stream().forEach(sd -> { + streamNamesList.add(sd.getName()); + nameToDefinition.put(sd.getName(), sd); + }); + String[] streamNames = streamNamesList.toArray(new String[0]); + Map> statuses = this.skipperClient.states(streamNames); + for (Map.Entry entry : nameToDefinition.entrySet()) { + String streamName = entry.getKey(); + if (statuses != null && statuses.containsKey(streamName) && !statuses.get(streamName).isEmpty()) { + states.put(nameToDefinition.get(streamName), + StreamDeployerUtil.aggregateState(new HashSet<>(statuses.get(streamName).values()))); + } else { + states.put(nameToDefinition.get(streamName), DeploymentState.undeployed); + } + } + return states; + } + + private DeploymentState getStreamDeploymentState(String streamName) { + DeploymentState state = null; + try { + Info info = this.skipperClient.status(streamName); + if (info.getStatus().getPlatformStatus() == null) { + return getDeploymentStateFromStatusInfo(info); + } + List appStatusList = deserializeAppStatus(info.getStatus().getPlatformStatus()); + Set deploymentStateList = appStatusList.stream().map(AppStatus::getState) + .collect(Collectors.toSet()); + state = StreamDeployerUtil.aggregateState(deploymentStateList); + } catch (ReleaseNotFoundException e) { + // a defined stream but unknown to skipper is considered to be in an undeployed state + if (streamDefinitionExists(streamName)) { + state = DeploymentState.undeployed; + } + } + return state; + } + + private DeploymentState getDeploymentStateFromStatusInfo(Info info) { + DeploymentState result = DeploymentState.unknown; + switch (info.getStatus().getStatusCode()) { + case FAILED: + result = DeploymentState.failed; + break; + case DELETED: + result = DeploymentState.undeployed; + break; + case DEPLOYED: + result = DeploymentState.deployed; + } + return result; + } + + private boolean streamDefinitionExists(String streamName) { + return this.streamDefinitionRepository.findById(streamName).isPresent(); + } + + @Override + public void scale(String streamName, String appName, int count, Map properties) { + this.skipperClient.scale(streamName, ScaleRequest.of(appName, count, properties)); + } + + public Release deployStream(StreamDeploymentRequest streamDeploymentRequest) { + validateStreamDeploymentRequest(streamDeploymentRequest); + Map streamDeployerProperties = streamDeploymentRequest.getStreamDeployerProperties(); + String packageVersion = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_VERSION); + Assert.isTrue(StringUtils.hasText(packageVersion), "Package Version must be set"); + logger.info("Deploying Stream " + streamDeploymentRequest.getStreamName() + " using skipper."); + String repoName = streamDeployerProperties.get(SkipperStream.SKIPPER_REPO_NAME); + repoName = (StringUtils.hasText(repoName)) ? (repoName) : "local"; + String platformName = streamDeployerProperties.get(SkipperStream.SKIPPER_PLATFORM_NAME); + platformName = determinePlatformName(platformName); + String packageName = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_NAME); + packageName = (StringUtils.hasText(packageName)) ? 
packageName : streamDeploymentRequest.getStreamName(); + // Create the package .zip file to upload + File packageFile = createPackageForStream(packageName, packageVersion, streamDeploymentRequest); + // Upload the package + UploadRequest uploadRequest = new UploadRequest(); + uploadRequest.setName(packageName); + uploadRequest.setVersion(packageVersion); + uploadRequest.setExtension("zip"); + uploadRequest.setRepoName(repoName); // TODO use from skipperDeploymentProperties if set. + try { + uploadRequest.setPackageFileAsBytes(Files.readAllBytes(packageFile.toPath())); + } catch (IOException e) { + throw new IllegalArgumentException("Can't read packageFile " + packageFile, e); + } + skipperClient.upload(uploadRequest); + // Install the package + String streamName = streamDeploymentRequest.getStreamName(); + InstallRequest installRequest = new InstallRequest(); + PackageIdentifier packageIdentifier = new PackageIdentifier(); + packageIdentifier.setPackageName(packageName); + packageIdentifier.setPackageVersion(packageVersion); + packageIdentifier.setRepositoryName(repoName); + installRequest.setPackageIdentifier(packageIdentifier); + InstallProperties installProperties = new InstallProperties(); + installProperties.setPlatformName(platformName); + installProperties.setReleaseName(streamName); + installProperties.setConfigValues(new ConfigValues()); + installRequest.setInstallProperties(installProperties); + Release release = null; + try { + release = this.skipperClient.install(installRequest); + } catch (Exception e) { + logger.error("Skipper install failed. Deleting the package: " + packageName); + try { + this.skipperClient.packageDelete(packageName); + } catch (Exception e1) { + logger.error("Package delete threw exception: " + e1.getMessage()); + } + throw new SkipperException(e.getMessage()); + } + // TODO store releasename in deploymentIdRepository... + return release; + } + + private String determinePlatformName(final String platformName) { + Collection deployers = skipperClient.listDeployers(); + if (StringUtils.hasText(platformName)) { + List filteredDeployers = deployers.stream() + .filter(d -> d.getName().equals(platformName)) + .collect(Collectors.toList()); + if (filteredDeployers.size() == 0) { + throw new IllegalArgumentException("No platform named '" + platformName + "'"); + } else { + return platformName; + } + } else { + if (deployers.size() == 0) { + throw new IllegalArgumentException("No platforms configured"); + } else { + String platformNameToUse = deployers.stream().findFirst().get().getName(); + logger.info("Using platform '" + platformNameToUse + "'"); + return platformNameToUse; + } + } + } + + private void validateStreamDeploymentRequest(StreamDeploymentRequest streamDeploymentRequest) { + if (streamDeploymentRequest.getAppDeploymentRequests() == null + || streamDeploymentRequest.getAppDeploymentRequests().isEmpty()) { + // nothing to validate. 
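One more aside before the validation block resumes below: `streamsStates` earlier in this class reduces Skipper's per-app status map to a single `DeploymentState` per stream via `StreamDeployerUtil.aggregateState`. A hedged sketch of that reduction step; the utility's package and public visibility are assumed from its unqualified use in this file, and the two input states are invented:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.springframework.cloud.dataflow.server.stream.StreamDeployerUtil;
import org.springframework.cloud.deployer.spi.app.DeploymentState;

class AggregateStateSketch {
	public static void main(String[] args) {
		Set<DeploymentState> appStates = new HashSet<>(Arrays.asList(
				DeploymentState.deployed, DeploymentState.deploying));
		// Collapse per-app states into one stream-level state; the result
		// depends on the precedence rules inside aggregateState.
		DeploymentState streamState = StreamDeployerUtil.aggregateState(appStates);
		System.out.println(streamState);
	}
}
```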
+
+    private void validateStreamDeploymentRequest(StreamDeploymentRequest streamDeploymentRequest) {
+        if (streamDeploymentRequest.getAppDeploymentRequests() == null
+                || streamDeploymentRequest.getAppDeploymentRequests().isEmpty()) {
+            // nothing to validate
+            return;
+        }
+        String streamName = streamDeploymentRequest.getStreamName();
+        // throw, as at this point we should have a definition
+        StreamDefinition streamDefinition = this.streamDefinitionRepository
+                .findById(streamName)
+                .orElseThrow(() -> new NoSuchStreamDefinitionException(streamDeploymentRequest.getStreamName()));
+
+        for (AppDeploymentRequest adr : streamDeploymentRequest.getAppDeploymentRequests()) {
+            String registeredAppName = getRegisteredName(streamDefinition, adr.getDefinition().getName());
+            String appName = String.format("%s-%s-v", streamName, registeredAppName);
+            if (appName.length() > 40) {
+                logger.warn("The stream name plus application name [" + appName + "] is longer than 40 characters."
+                        + " It cannot exceed " + MAX_APPNAME_LENGTH + " characters in length.");
+            }
+            if (appName.length() > MAX_APPNAME_LENGTH) {
+                throw new InvalidStreamDefinitionException(
+                        String.format("The runtime application name for the app %s in the stream %s "
+                                + "should not exceed %s in length. The runtime application name is: %s",
+                                registeredAppName, streamName, MAX_APPNAME_LENGTH, appName));
+            }
+            String version = this.appRegistryService.getResourceVersion(adr.getResource());
+            validateAppVersionIsRegistered(registeredAppName, adr, version);
+        }
+    }
+
+    private String getRegisteredName(StreamDefinition streamDefinition, String adrAppName) {
+        for (StreamAppDefinition appDefinition : this.streamDefinitionService.getAppDefinitions(streamDefinition)) {
+            if (appDefinition.getName().equals(adrAppName)) {
+                return appDefinition.getRegisteredAppName();
+            }
+        }
+        return adrAppName;
+    }
+
+    public void validateAppVersionIsRegistered(StreamDefinition streamDefinition, AppDeploymentRequest appDeploymentRequest, String appVersion) {
+        String registeredAppName = getRegisteredName(streamDefinition, appDeploymentRequest.getDefinition().getName());
+        this.validateAppVersionIsRegistered(registeredAppName, appDeploymentRequest, appVersion);
+    }
+
+    private void validateAppVersionIsRegistered(String registeredAppName, AppDeploymentRequest appDeploymentRequest, String appVersion) {
+        String appTypeString = appDeploymentRequest.getDefinition().getProperties()
+                .get(DataFlowPropertyKeys.STREAM_APP_TYPE);
+        ApplicationType applicationType = ApplicationType.valueOf(appTypeString);
+        if (!this.appRegistryService.appExist(registeredAppName, applicationType, appVersion)) {
+            throw new IllegalStateException(String.format("The %s:%s:%s app is not registered!",
+                    registeredAppName, appTypeString, appVersion));
+        }
+    }
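+
+    // Worked example of the length check above (stream and app names invented):
+    // for a stream "ticktock" with a registered app "log", the candidate runtime
+    // name is String.format("%s-%s-v", "ticktock", "log") = "ticktock-log-v",
+    // which is 14 characters and passes. A long enough stream name would exceed
+    // MAX_APPNAME_LENGTH and abort the deployment before anything is uploaded
+    // to Skipper.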
+
+    private File createPackageForStream(String packageName, String packageVersion,
+            StreamDeploymentRequest streamDeploymentRequest) {
+        PackageWriter packageWriter = new DefaultPackageWriter();
+        Package pkgtoWrite = createPackage(packageName, packageVersion, streamDeploymentRequest);
+        Path tempPath;
+        try {
+            tempPath = Files.createTempDirectory("streampackages");
+        } catch (IOException e) {
+            throw new IllegalArgumentException("Can't create temp directory");
+        }
+        File outputDirectory = tempPath.toFile();
+
+        File zipFile = packageWriter.write(pkgtoWrite, outputDirectory);
+        return zipFile;
+    }
+
+    private Package createPackage(String packageName, String packageVersion,
+            StreamDeploymentRequest streamDeploymentRequest) {
+        Package pkg = new Package();
+        PackageMetadata packageMetadata = new PackageMetadata();
+        packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION);
+        packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND);
+        packageMetadata.setName(packageName);
+        packageMetadata.setVersion(packageVersion);
+        packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER);
+        packageMetadata.setDescription(streamDeploymentRequest.getDslText());
+        pkg.setMetadata(packageMetadata);
+        pkg.setDependencies(createDependentPackages(packageVersion, streamDeploymentRequest));
+        return pkg;
+    }
+
+    private List<Package> createDependentPackages(String packageVersion,
+            StreamDeploymentRequest streamDeploymentRequest) {
+        List<Package> packageList = new ArrayList<>();
+        for (AppDeploymentRequest appDeploymentRequest : streamDeploymentRequest.getAppDeploymentRequests()) {
+            packageList.add(createDependentPackage(packageVersion, appDeploymentRequest));
+        }
+        return packageList;
+    }
+
+    private Package createDependentPackage(String packageVersion, AppDeploymentRequest appDeploymentRequest) {
+        Package pkg = new Package();
+        String packageName = appDeploymentRequest.getDefinition().getName();
+
+        PackageMetadata packageMetadata = new PackageMetadata();
+        packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION);
+        packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND);
+        packageMetadata.setName(packageName);
+        packageMetadata.setVersion(packageVersion);
+        packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER);
+
+        pkg.setMetadata(packageMetadata);
+
+        ConfigValues configValues = new ConfigValues();
+        Map<String, Object> configValueMap = new HashMap<>();
+        Map<String, Object> metadataMap = new HashMap<>();
+        Map<String, Object> specMap = new HashMap<>();
+
+        // Add metadata
+        metadataMap.put("name", packageName);
+
+        // Add spec
+        String resourceWithoutVersion = this.appRegistryService.getResourceWithoutVersion(appDeploymentRequest.getResource());
+        specMap.put("resource", resourceWithoutVersion);
+        specMap.put("applicationProperties", appDeploymentRequest.getDefinition().getProperties());
+        specMap.put("deploymentProperties", appDeploymentRequest.getDeploymentProperties());
+        String version = this.appRegistryService.getResourceVersion(appDeploymentRequest.getResource());
+        // Add version, including a possible override via deploymentProperties
+        // (hack: the version override is carried in the command-line arguments)
+        if (appDeploymentRequest.getCommandlineArguments().size() == 1) {
+            specMap.put("version", appDeploymentRequest.getCommandlineArguments().get(0));
+        } else {
+            specMap.put("version", version);
+        }
+        // Add metadata and spec to top level map
+        configValueMap.put("metadata", metadataMap);
+        configValueMap.put("spec", specMap);
         DumperOptions dumperOptions = new DumperOptions();
         dumperOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
         dumperOptions.setDefaultScalarStyle(DumperOptions.ScalarStyle.DOUBLE_QUOTED);
         dumperOptions.setPrettyFlow(false);
         dumperOptions.setSplitLines(false);
-        Yaml yaml = new Yaml(dumperOptions);
+        Yaml yaml = new Yaml(new SafeConstructor(), new Representer(dumperOptions), dumperOptions);
         configValues.setRaw(yaml.dump(configValueMap));
-        pkg.setConfigValues(configValues);
-        pkg.setTemplates(createGenericTemplate());
-        return pkg;
-
-    }
-
-    private List
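
For context on the one-line Yaml change in this hunk: SnakeYAML's single-argument constructor uses the default Constructor, which can instantiate arbitrary classes if the same Yaml instance is ever used to load input. Pairing SafeConstructor with a Representer built from the same DumperOptions restricts loading to plain types while leaving the dump output unchanged. A minimal standalone sketch, mirroring the constructor calls from the patched line (the map contents and class name are invented for illustration):

import java.util.LinkedHashMap;
import java.util.Map;

import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;
import org.yaml.snakeyaml.representer.Representer;

public class SafeYamlDumpExample {
    public static void main(String[] args) {
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        options.setDefaultScalarStyle(DumperOptions.ScalarStyle.DOUBLE_QUOTED);
        // Same construction as the patched line: safe loading, unchanged dumping
        Yaml yaml = new Yaml(new SafeConstructor(), new Representer(options), options);

        Map<String, Object> spec = new LinkedHashMap<>();
        spec.put("resource", "docker:springcloudstream/log-sink-rabbit");
        spec.put("version", "3.2.1");
        Map<String, Object> root = new LinkedHashMap<>();
        root.put("spec", spec);

        // Prints block-style YAML with double-quoted scalars, e.g.
        // "spec":
        //   "resource": "docker:springcloudstream/log-sink-rabbit"
        //   "version": "3.2.1"
        System.out.println(yaml.dump(root));
    }
}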