From 4b1f164b058231116e6304b6f0fd4116a685a87f Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Fri, 17 Feb 2023 16:55:04 +0300 Subject: [PATCH 01/15] EPMRPP-81362 || Fix security vulnerabilities (#58) --- Dockerfile | 9 +++++---- build.gradle | 4 +++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index a2e5cea..0ed25f2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,10 @@ -FROM alpine:latest +FROM amazoncorretto:11.0.17 LABEL version=5.7.3 description="EPAM Report portal. Service jobs" maintainer="Andrei Varabyeu , Hleb Kanonik " ARG GH_TOKEN -RUN apk -U -q upgrade && apk --no-cache -q add openjdk11 ca-certificates && \ - echo 'exec java ${JAVA_OPTS} -jar service-jobs-5.7.3-exec.jar' > /start.sh && chmod +x /start.sh && \ - wget --header="Authorization: Bearer ${GH_TOKEN}" -q https://maven.pkg.github.com/reportportal/service-jobs/com/epam/reportportal/service-jobs/5.7.3/service-jobs-5.7.3-exec.jar +ARG GH_URL=https://__:$GH_TOKEN@maven.pkg.github.com/reportportal/service-jobs/com/epam/reportportal/service-jobs/5.7.3/service-jobs-5.7.3-exec.jar +RUN curl -O -L $GH_URL \ + --output service-jobs-5.7.3-exec.jar && \ + echo 'exec java ${JAVA_OPTS} -jar service-jobs-5.7.3-exec.jar' > /start.sh && chmod +x /start.sh ENV JAVA_OPTS="-Xmx512m -XX:+UseG1GC -XX:InitiatingHeapOccupancyPercent=70 -Djava.security.egd=file:/dev/./urandom" VOLUME ["/tmp"] EXPOSE 8080 diff --git a/build.gradle b/build.gradle index 8ef9b9f..ac00754 100644 --- a/build.gradle +++ b/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'org.springframework.boot' version '2.5.14' + id 'org.springframework.boot' version '2.7.0' id 'io.spring.dependency-management' version '1.0.11.RELEASE' id 'java' } @@ -79,6 +79,8 @@ dependencies { // implementation 'com.google.guava:guava:30.0-jre'; compile "com.rabbitmq:http-client:2.1.0.RELEASE" + //Fix CVE + implementation 'com.fasterxml.jackson.core:jackson-databind:2.13.4.2' 
runtimeOnly 'org.postgresql:postgresql' From 4bf1523daeb1d05683ac22255707b27018040e1f Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Mon, 20 Feb 2023 12:46:42 +0300 Subject: [PATCH 02/15] EPMRPP-81362 || Update gson version to make able jcloud work (#59) --- build.gradle | 2 ++ 1 file changed, 2 insertions(+) diff --git a/build.gradle b/build.gradle index ac00754..a104ef8 100644 --- a/build.gradle +++ b/build.gradle @@ -74,6 +74,8 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter-amqp' implementation 'org.apache.jclouds.api:s3:2.5.0' implementation 'org.apache.jclouds.provider:aws-s3:2.5.0' + //Needed for correct jcloud work + implementation 'com.google.code.gson:gson:2.8.9' implementation 'org.apache.httpcomponents:httpclient:4.5.13' // https://avd.aquasec.com/nvd/cve-2020-8908 // implementation 'com.google.guava:guava:30.0-jre'; From 065c49fd54c47e421469664352a903f6f1796e01 Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Wed, 5 Apr 2023 16:31:56 +0300 Subject: [PATCH 03/15] Merge master to 5.7.5 (#66) * EPMRPP-80865|| Update bom version * [Gradle Release Plugin] - new version commit: '5.7.5'. 
* EPMRPP-82673-exec-jar promote.yml update (added exec jar) --------- Co-authored-by: miracle8484 <76156909+miracle8484@users.noreply.github.com> Co-authored-by: reportportal.io Co-authored-by: rkukharenka Co-authored-by: Ryhor <125865748+rkukharenka@users.noreply.github.com> --- .github/workflows/promote.yml | 2 +- .github/workflows/release.yml | 2 +- Dockerfile | 6 +++--- gradle.properties | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/promote.yml b/.github/workflows/promote.yml index 0174d39..6233633 100644 --- a/.github/workflows/promote.yml +++ b/.github/workflows/promote.yml @@ -23,7 +23,7 @@ on: env: REPOSITORY_URL: 'https://maven.pkg.github.com' UPSTREAM_REPOSITORY_URL: 'https://oss.sonatype.org' - PACKAGE_SUFFIXES: '-javadoc.jar,-javadoc.jar.asc,-sources.jar,-sources.jar.asc,.jar,.jar.asc,.pom,.pom.asc' + PACKAGE_SUFFIXES: '-exec.jar,-exec.jar.asc,-javadoc.jar,-javadoc.jar.asc,-sources.jar,-sources.jar.asc,.jar,.jar.asc,.pom,.pom.asc' PACKAGE: 'com.epam.reportportal' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fecac99..744523d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ on: env: GH_USER_NAME: github.actor SCRIPTS_VERSION: 5.7.0 - BOM_VERSION: 5.7.3 + BOM_VERSION: 5.7.4 REPOSITORY_URL: 'https://maven.pkg.github.com/' jobs: diff --git a/Dockerfile b/Dockerfile index 0ed25f2..dcdfde6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ FROM amazoncorretto:11.0.17 -LABEL version=5.7.3 description="EPAM Report portal. Service jobs" maintainer="Andrei Varabyeu , Hleb Kanonik " +LABEL version=5.7.4 description="EPAM Report portal. 
Service jobs" maintainer="Andrei Varabyeu , Hleb Kanonik " ARG GH_TOKEN -ARG GH_URL=https://__:$GH_TOKEN@maven.pkg.github.com/reportportal/service-jobs/com/epam/reportportal/service-jobs/5.7.3/service-jobs-5.7.3-exec.jar +ARG GH_URL=https://__:$GH_TOKEN@maven.pkg.github.com/reportportal/service-jobs/com/epam/reportportal/service-jobs/5.7.4/service-jobs-5.7.4-exec.jar RUN curl -O -L $GH_URL \ --output service-jobs-5.7.3-exec.jar && \ - echo 'exec java ${JAVA_OPTS} -jar service-jobs-5.7.3-exec.jar' > /start.sh && chmod +x /start.sh + echo 'exec java ${JAVA_OPTS} -jar service-jobs-5.7.4-exec.jar' > /start.sh && chmod +x /start.sh ENV JAVA_OPTS="-Xmx512m -XX:+UseG1GC -XX:InitiatingHeapOccupancyPercent=70 -Djava.security.egd=file:/dev/./urandom" VOLUME ["/tmp"] EXPOSE 8080 diff --git a/gradle.properties b/gradle.properties index f947c21..8a54ff2 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -version=5.7.4 +version=5.7.5 description=EPAM Report portal. Service jobs dockerServerUrl=unix:///var/run/docker.sock dockerPrepareEnvironment=apk -U -q upgrade && apk --no-cache -q add openjdk11 ca-certificates From 74a0ab331c9aead7e5e84a3366170f8fd7571a71 Mon Sep 17 00:00:00 2001 From: Andrei Piankouski Date: Wed, 12 Apr 2023 11:23:29 +0300 Subject: [PATCH 04/15] Update version --- gradle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradle.properties b/gradle.properties index 8a54ff2..81f77b1 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -version=5.7.5 +version=5.8.0 description=EPAM Report portal. 
Service jobs dockerServerUrl=unix:///var/run/docker.sock dockerPrepareEnvironment=apk -U -q upgrade && apk --no-cache -q add openjdk11 ca-certificates From 43f0ca7f8bb67fcb0e60f6676c899774648e4cec Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Mon, 17 Apr 2023 16:34:30 +0300 Subject: [PATCH 05/15] EPMRPP-82707 || Add single bucket configuration (#67) * EPMRPP-82707 || Add single bucket configuration * EPMRPP-82707 || Refactor according to checkstyle --- .../config/DataStorageConfig.java | 314 ++++++++++-------- .../storage/S3DataStorageService.java | 84 +++-- .../epam/reportportal/utils/FeatureFlag.java | 35 ++ .../utils/FeatureFlagHandler.java | 37 +++ src/main/resources/application.yml | 6 + 5 files changed, 313 insertions(+), 163 deletions(-) create mode 100644 src/main/java/com/epam/reportportal/utils/FeatureFlag.java create mode 100644 src/main/java/com/epam/reportportal/utils/FeatureFlagHandler.java diff --git a/src/main/java/com/epam/reportportal/config/DataStorageConfig.java b/src/main/java/com/epam/reportportal/config/DataStorageConfig.java index 74e15ef..050290c 100644 --- a/src/main/java/com/epam/reportportal/config/DataStorageConfig.java +++ b/src/main/java/com/epam/reportportal/config/DataStorageConfig.java @@ -1,14 +1,32 @@ +/* + * Copyright 2019 EPAM Systems + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package com.epam.reportportal.config; import com.epam.reportportal.storage.DataStorageService; import com.epam.reportportal.storage.LocalDataStorageService; import com.epam.reportportal.storage.S3DataStorageService; +import com.epam.reportportal.utils.FeatureFlagHandler; import com.google.common.base.Optional; import com.google.common.base.Supplier; import com.google.common.cache.CacheLoader; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.inject.Module; +import java.util.Set; import org.jclouds.ContextBuilder; import org.jclouds.aws.s3.config.AWSS3HttpApiModule; import org.jclouds.blobstore.BlobStore; @@ -23,137 +41,173 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; -import java.util.Set; - +/** + * Blob storage configuration. + * + * @author Dzianis_Shybeka + */ @Configuration public class DataStorageConfig { - /** - * Amazon has a general work flow they publish that allows clients to always find the correct URL endpoint for a given bucket: - * 1) ask s3.amazonaws.com for the bucket location - * 2) use the url returned to make the container specific request (get/put, etc) - * Jclouds cache the results from the first getBucketLocation call and use that region-specific URL, as needed. 
- * In this custom implementation of {@link AWSS3HttpApiModule} we are providing location from environment variable, so that - * we don't need to make getBucketLocation call - */ - @ConfiguresHttpApi - private static class CustomBucketToRegionModule extends AWSS3HttpApiModule { - private final String region; - - public CustomBucketToRegionModule(String region) { - this.region = region; - } - - @Override - @SuppressWarnings("Guava") - protected CacheLoader> bucketToRegion(Supplier> regionSupplier, S3Client client) { - Set regions = regionSupplier.get(); - if (regions.isEmpty()) { - return new CacheLoader<>() { - - @Override - @SuppressWarnings({ "Guava", "NullableProblems" }) - public Optional load(String bucket) { - if (CustomBucketToRegionModule.this.region != null) { - return Optional.of(CustomBucketToRegionModule.this.region); - } - return Optional.absent(); - } - - @Override - public String toString() { - return "noRegions()"; - } - }; - } else if (regions.size() == 1) { - final String onlyRegion = Iterables.getOnlyElement(regions); - return new CacheLoader<>() { - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") - final Optional onlyRegionOption = Optional.of(onlyRegion); - - @Override - @SuppressWarnings("NullableProblems") - public Optional load(String bucket) { - if (CustomBucketToRegionModule.this.region != null) { - return Optional.of(CustomBucketToRegionModule.this.region); - } - return onlyRegionOption; - } - - @Override - public String toString() { - return "onlyRegion(" + onlyRegion + ")"; - } - }; - } else { - return new CacheLoader<>() { - @Override - @SuppressWarnings("NullableProblems") - public Optional load(String bucket) { - if (CustomBucketToRegionModule.this.region != null) { - return Optional.of(CustomBucketToRegionModule.this.region); - } - try { - return Optional.fromNullable(client.getBucketLocation(bucket)); - } catch (ContainerNotFoundException e) { - return Optional.absent(); - } - } - - @Override - public String toString() { - 
return "bucketToRegion()"; - } - }; - } - } - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "filesystem") - public DataStorageService localDataStore(@Value("${datastore.default.path:/data/store}") String storagePath) { - return new LocalDataStorageService(storagePath); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") - public BlobStore minioBlobStore(@Value("${datastore.minio.accessKey}") String accessKey, - @Value("${datastore.minio.secretKey}") String secretKey, @Value("${datastore.minio.endpoint}") String endpoint) { - - BlobStoreContext blobStoreContext = ContextBuilder.newBuilder("s3") - .endpoint(endpoint) - .credentials(accessKey, secretKey) - .buildView(BlobStoreContext.class); - - return blobStoreContext.getBlobStore(); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") - public DataStorageService minioDataStore(@Autowired BlobStore blobStore, @Value("${datastore.minio.bucketPrefix}") String bucketPrefix, - @Value("${datastore.minio.defaultBucketName}") String defaultBucketName) { - return new S3DataStorageService(blobStore, bucketPrefix, defaultBucketName); - } - - @Bean - @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") - public BlobStore blobStore(@Value("${datastore.s3.accessKey}") String accessKey, @Value("${datastore.s3.secretKey}") String secretKey, - @Value("${datastore.s3.region}") String region) { - Iterable modules = ImmutableSet.of(new CustomBucketToRegionModule(region)); - - BlobStoreContext blobStoreContext = ContextBuilder.newBuilder("aws-s3") - .modules(modules) - .credentials(accessKey, secretKey) - .buildView(BlobStoreContext.class); - - return blobStoreContext.getBlobStore(); - } - - @Bean - @Primary - @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") - public DataStorageService s3DataStore(@Autowired BlobStore blobStore, @Value("${datastore.s3.bucketPrefix}") String bucketPrefix, - 
@Value("${datastore.s3.defaultBucketName}") String defaultBucketName) { - return new S3DataStorageService(blobStore, bucketPrefix, defaultBucketName); - } + /** + * Amazon has a general work flow they publish that allows clients to always find the correct + * URL endpoint for a given bucket: + * 1) ask s3.amazonaws.com for the bucket location + * 2) use the url returned to make the container specific request (get/put, etc) + * Jclouds cache the results from the first getBucketLocation call and use that + * region-specific URL, as needed. + * In this custom implementation of {@link AWSS3HttpApiModule} we are providing location + * from environment variable, so that we don't need to make getBucketLocation call + */ + @ConfiguresHttpApi + private static class CustomBucketToRegionModule extends AWSS3HttpApiModule { + private final String region; + + public CustomBucketToRegionModule(String region) { + this.region = region; + } + + @Override + @SuppressWarnings("Guava") + protected CacheLoader> bucketToRegion( + Supplier> regionSupplier, S3Client client) { + Set regions = regionSupplier.get(); + if (regions.isEmpty()) { + return new CacheLoader<>() { + + @Override + @SuppressWarnings({ "Guava", "NullableProblems" }) + public Optional load(String bucket) { + if (CustomBucketToRegionModule.this.region != null) { + return Optional.of(CustomBucketToRegionModule.this.region); + } + return Optional.absent(); + } + + @Override + public String toString() { + return "noRegions()"; + } + }; + } else if (regions.size() == 1) { + final String onlyRegion = Iterables.getOnlyElement(regions); + return new CacheLoader<>() { + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + final Optional onlyRegionOption = Optional.of(onlyRegion); + + @Override + @SuppressWarnings("NullableProblems") + public Optional load(String bucket) { + if (CustomBucketToRegionModule.this.region != null) { + return Optional.of(CustomBucketToRegionModule.this.region); + } + return onlyRegionOption; + } + 
+ @Override + public String toString() { + return "onlyRegion(" + onlyRegion + ")"; + } + }; + } else { + return new CacheLoader<>() { + @Override + @SuppressWarnings("NullableProblems") + public Optional load(String bucket) { + if (CustomBucketToRegionModule.this.region != null) { + return Optional.of(CustomBucketToRegionModule.this.region); + } + try { + return Optional.fromNullable(client.getBucketLocation(bucket)); + } catch (ContainerNotFoundException e) { + return Optional.absent(); + } + } + + @Override + public String toString() { + return "bucketToRegion()"; + } + }; + } + } + } + + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "filesystem") + public DataStorageService localDataStore( + @Value("${datastore.default.path:/data/store}") String storagePath) { + return new LocalDataStorageService(storagePath); + } + + /** + * Creates BlobStore bean, that works with MinIO. + * + * @param accessKey accessKey to use + * @param secretKey secretKey to use + * @param endpoint MinIO endpoint + * @return {@link BlobStore} + */ + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") + public BlobStore minioBlobStore(@Value("${datastore.minio.accessKey}") String accessKey, + @Value("${datastore.minio.secretKey}") String secretKey, + @Value("${datastore.minio.endpoint}") String endpoint) { + + BlobStoreContext blobStoreContext = + ContextBuilder.newBuilder("s3").endpoint(endpoint).credentials(accessKey, secretKey) + .buildView(BlobStoreContext.class); + + return blobStoreContext.getBlobStore(); + } + + /** + * Creates DataStore bean to work with MinIO. 
+ * + * @param blobStore {@link BlobStore} object + * @param bucketPrefix Prefix for bucket name + * @param defaultBucketName Name of default bucket to use + * @param featureFlagHandler Instance of {@link FeatureFlagHandler} to check enabled features + * @return {@link DataStorageService} object + */ + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") + public DataStorageService minioDataStore(@Autowired BlobStore blobStore, + @Value("${datastore.minio.bucketPrefix}") String bucketPrefix, + @Value("${datastore.minio.defaultBucketName}") String defaultBucketName, + FeatureFlagHandler featureFlagHandler) { + return new S3DataStorageService(blobStore, bucketPrefix, defaultBucketName, featureFlagHandler); + } + + /** + * Creates BlobStore bean, that works with AWS S3. + * + * @param accessKey accessKey to use + * @param secretKey secretKey to use + * @param region AWS S3 region to use. + * @return {@link BlobStore} + */ + @Bean + @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") + public BlobStore blobStore(@Value("${datastore.s3.accessKey}") String accessKey, + @Value("${datastore.s3.secretKey}") String secretKey, + @Value("${datastore.s3.region}") String region) { + Iterable modules = ImmutableSet.of(new CustomBucketToRegionModule(region)); + + BlobStoreContext blobStoreContext = + ContextBuilder.newBuilder("aws-s3").modules(modules).credentials(accessKey, secretKey) + .buildView(BlobStoreContext.class); + + return blobStoreContext.getBlobStore(); + } + + @Bean + @Primary + @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") + public DataStorageService s3DataStore(@Autowired BlobStore blobStore, + @Value("${datastore.s3.bucketPrefix}") String bucketPrefix, + @Value("${datastore.s3.defaultBucketName}") String defaultBucketName, + FeatureFlagHandler featureFlagHandler) { + return new S3DataStorageService(blobStore, bucketPrefix, defaultBucketName, featureFlagHandler); + } } diff --git 
a/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java b/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java index 1e910e8..979c0fc 100644 --- a/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java +++ b/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java @@ -16,55 +16,73 @@ package com.epam.reportportal.storage; +import com.epam.reportportal.utils.FeatureFlag; +import com.epam.reportportal.utils.FeatureFlagHandler; +import java.nio.file.Path; +import java.nio.file.Paths; import org.jclouds.blobstore.BlobStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.nio.file.Path; -import java.nio.file.Paths; - /** * S3 storage service */ public class S3DataStorageService implements DataStorageService { - private static final Logger LOGGER = LoggerFactory.getLogger(S3DataStorageService.class); + private static final Logger LOGGER = LoggerFactory.getLogger(S3DataStorageService.class); - private final BlobStore blobStore; - private final String bucketPrefix; - private final String defaultBucketName; + private final BlobStore blobStore; + private final String bucketPrefix; + private final String defaultBucketName; - public S3DataStorageService(BlobStore blobStore, String bucketPrefix, String defaultBucketName) { - this.blobStore = blobStore; - this.bucketPrefix = bucketPrefix; - this.defaultBucketName = defaultBucketName; - } + private final FeatureFlagHandler featureFlagHandler; - @Override - public void delete(String filePath) throws Exception { - Path targetPath = Paths.get(filePath); - int nameCount = targetPath.getNameCount(); + /** + * Creates instance of {@link S3DataStorageService}. + * + * @param blobStore {@link BlobStore} + * @param bucketPrefix Prefix for bucket name + * @param defaultBucketName Name for the default bucket(plugins, etc.) 
+ * @param featureFlagHandler {@link FeatureFlagHandler} + */ + public S3DataStorageService(BlobStore blobStore, String bucketPrefix, String defaultBucketName, + FeatureFlagHandler featureFlagHandler) { + this.blobStore = blobStore; + this.bucketPrefix = bucketPrefix; + this.defaultBucketName = defaultBucketName; + this.featureFlagHandler = featureFlagHandler; + } - String bucket; - String objectName; + @Override + public void delete(String filePath) throws Exception { + Path targetPath = Paths.get(filePath); + int nameCount = targetPath.getNameCount(); - if (nameCount > 1) { - bucket = bucketPrefix + retrievePath(targetPath, 0, 1); - objectName = retrievePath(targetPath, 1, nameCount); - } else { - bucket = defaultBucketName; - objectName = retrievePath(targetPath, 0, 1); - } + String bucket; + String objectName; - try { - blobStore.removeBlob(bucket, objectName); - } catch (Exception e) { - LOGGER.error("Unable to delete file '{}'", filePath, e); - throw e; - } + if (featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)) { + bucket = defaultBucketName; + objectName = filePath; + } else { + if (nameCount > 1) { + bucket = bucketPrefix + retrievePath(targetPath, 0, 1); + objectName = retrievePath(targetPath, 1, nameCount); + } else { + bucket = defaultBucketName; + objectName = retrievePath(targetPath, 0, 1); + } } - private String retrievePath(Path path, int beginIndex, int endIndex) { - return String.valueOf(path.subpath(beginIndex, endIndex)); + try { + blobStore.removeBlob(bucket, objectName); + } catch (Exception e) { + LOGGER.error("Unable to delete file '{}'", filePath, e); + throw e; } + } + + private String retrievePath(Path path, int beginIndex, int endIndex) { + return String.valueOf(path.subpath(beginIndex, endIndex)); + } } diff --git a/src/main/java/com/epam/reportportal/utils/FeatureFlag.java b/src/main/java/com/epam/reportportal/utils/FeatureFlag.java new file mode 100644 index 0000000..43b99ed --- /dev/null +++ 
b/src/main/java/com/epam/reportportal/utils/FeatureFlag.java @@ -0,0 +1,35 @@ +package com.epam.reportportal.utils; + +import java.util.Arrays; +import java.util.Optional; + +/** + * Enumeration of current feature flags. + * + * @author Ivan Kustau + */ +public enum FeatureFlag { + SINGLE_BUCKET("singleBucket"); + + private final String name; + + FeatureFlag(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + /** + * Returns {@link Optional} of {@link FeatureFlag} by string. + * + * @param name Name of feature flag + * @return {@link Optional} of {@link FeatureFlag} by string + */ + public static Optional fromString(String name) { + return Optional.ofNullable(name).flatMap( + str -> Arrays.stream(values()).filter(it -> it.name.equalsIgnoreCase(str)).findAny()); + + } +} \ No newline at end of file diff --git a/src/main/java/com/epam/reportportal/utils/FeatureFlagHandler.java b/src/main/java/com/epam/reportportal/utils/FeatureFlagHandler.java new file mode 100644 index 0000000..9c286f9 --- /dev/null +++ b/src/main/java/com/epam/reportportal/utils/FeatureFlagHandler.java @@ -0,0 +1,37 @@ +package com.epam.reportportal.utils; + +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; +import org.springframework.util.CollectionUtils; + +/** + * Component for checking enabled feature flags. + * + * @author Ivan Kustau + */ +@Component +public class FeatureFlagHandler { + + private final Set enabledFeatureFlagsSet = new HashSet<>(); + + /** + * Initialises {@link FeatureFlagHandler} by environment variable with enabled feature flags. 
+ * + * @param featureFlags Set of enabled feature flags + */ + public FeatureFlagHandler( + @Value("#{'${rp.feature.flags}'.split(',')}") Set featureFlags) { + + if (!CollectionUtils.isEmpty(featureFlags)) { + featureFlags.stream().map(FeatureFlag::fromString).filter(Optional::isPresent) + .map(Optional::get).forEach(enabledFeatureFlagsSet::add); + } + } + + public boolean isEnabled(FeatureFlag featureFlag) { + return enabledFeatureFlagsSet.contains(featureFlag); + } +} diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index b1550a3..5ee3721 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -84,6 +84,12 @@ datastore: defaultBucketName: rp-bucket region: #{null} # could be one of [filesystem, s3, minio] + s3: + region: us-standard + accessKey: + secretKey: + bucketPrefix: prj- + defaultBucketName: rp-bucket type: minio From 03972333752cfa5ad39d500f1cc858d6a3f921e0 Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Wed, 26 Apr 2023 12:31:23 +0300 Subject: [PATCH 06/15] EPMRPP-79722 || Replace RuntimeException with checked exception when file is not found in CleanStorageJob (#68) --- .../jobs/clean/CleanStorageJob.java | 104 ++++++++++-------- .../model/BlobNotFoundException.java | 35 ++++++ .../storage/S3DataStorageService.java | 5 +- 3 files changed, 97 insertions(+), 47 deletions(-) create mode 100644 src/main/java/com/epam/reportportal/model/BlobNotFoundException.java diff --git a/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java b/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java index 149dbd4..2cd58ee 100644 --- a/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java +++ b/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java @@ -1,7 +1,11 @@ package com.epam.reportportal.jobs.clean; import com.epam.reportportal.jobs.BaseJob; +import 
com.epam.reportportal.model.BlobNotFoundException; import com.epam.reportportal.storage.DataStorageService; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.concurrent.atomic.AtomicInteger; import net.javacrumbs.shedlock.spring.annotation.SchedulerLock; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.util.Strings; @@ -11,10 +15,6 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.concurrent.atomic.AtomicInteger; - /** * Removing data from storage. * @@ -23,52 +23,66 @@ @Service public class CleanStorageJob extends BaseJob { - private static final String ROLLBACK_ERROR_MESSAGE = "Rollback deleting transaction."; - private static final String SELECT_AND_DELETE_DATA_CHUNK_QUERY = "DELETE FROM attachment_deletion WHERE id in " + - "(SELECT id FROM attachment_deletion ORDER BY id LIMIT ?) RETURNING *"; - private final DataStorageService storageService; - private final int chunkSize; + private static final String ROLLBACK_ERROR_MESSAGE = "Rollback deleting transaction."; + private static final String SELECT_AND_DELETE_DATA_CHUNK_QUERY = + "DELETE FROM attachment_deletion WHERE id IN " + + "(SELECT id FROM attachment_deletion ORDER BY id LIMIT ?) RETURNING *"; + private final DataStorageService storageService; + private final int chunkSize; - public CleanStorageJob(JdbcTemplate jdbcTemplate, DataStorageService storageService, - @Value("${rp.environment.variable.clean.storage.chunkSize}") int chunkSize) { - super(jdbcTemplate); - this.chunkSize = chunkSize; - this.storageService = storageService; - } + /** + * Initializes {@link CleanStorageJob}. 
+ * + * @param jdbcTemplate {@link JdbcTemplate} + * @param storageService {@link DataStorageService} + * @param chunkSize Size of elements deleted at once + */ + public CleanStorageJob(JdbcTemplate jdbcTemplate, DataStorageService storageService, + @Value("${rp.environment.variable.clean.storage.chunkSize}") int chunkSize) { + super(jdbcTemplate); + this.chunkSize = chunkSize; + this.storageService = storageService; + } - @Scheduled(cron = "${rp.environment.variable.clean.storage.cron}") - @SchedulerLock(name = "cleanStorage", lockAtMostFor = "24h") - @Transactional - public void execute() { - logStart(); - AtomicInteger counter = new AtomicInteger(0); + /** + * Deletes attachments, which are set to be deleted. + */ + @Scheduled(cron = "${rp.environment.variable.clean.storage.cron}") + @SchedulerLock(name = "cleanStorage", lockAtMostFor = "24h") + @Transactional + public void execute() { + logStart(); + AtomicInteger counter = new AtomicInteger(0); - jdbcTemplate.query(SELECT_AND_DELETE_DATA_CHUNK_QUERY, rs -> { - try { - delete(rs.getString("file_id"), rs.getString("thumbnail_id")); - counter.incrementAndGet(); - while (rs.next()) { - delete(rs.getString("file_id"), rs.getString("thumbnail_id")); - counter.incrementAndGet(); - } - } catch (Exception e) { - throw new RuntimeException(ROLLBACK_ERROR_MESSAGE, e); - } - }, chunkSize); + jdbcTemplate.query(SELECT_AND_DELETE_DATA_CHUNK_QUERY, rs -> { + try { + delete(rs.getString("file_id"), rs.getString("thumbnail_id")); + counter.incrementAndGet(); + while (rs.next()) { + delete(rs.getString("file_id"), rs.getString("thumbnail_id")); + counter.incrementAndGet(); + } + } catch (BlobNotFoundException e) { + LOGGER.info("File {} is not found when executing clean storage job", e.getFileName()); + } catch (Exception e) { + throw new RuntimeException(ROLLBACK_ERROR_MESSAGE, e); + } + }, chunkSize); - logFinish(counter.get()); - } + logFinish(counter.get()); + } - private void delete(String fileId, String thumbnailId) 
throws Exception { - if (Strings.isNotBlank(fileId)) { - storageService.delete(decode(fileId)); - } - if (Strings.isNotBlank(thumbnailId)) { - storageService.delete(decode(thumbnailId)); - } + private void delete(String fileId, String thumbnailId) throws Exception { + if (Strings.isNotBlank(fileId)) { + storageService.delete(decode(fileId)); } - - private String decode(String data) { - return StringUtils.isEmpty(data) ? data : new String(Base64.getUrlDecoder().decode(data), StandardCharsets.UTF_8); + if (Strings.isNotBlank(thumbnailId)) { + storageService.delete(decode(thumbnailId)); } + } + + private String decode(String data) { + return StringUtils.isEmpty(data) ? data : + new String(Base64.getUrlDecoder().decode(data), StandardCharsets.UTF_8); + } } diff --git a/src/main/java/com/epam/reportportal/model/BlobNotFoundException.java b/src/main/java/com/epam/reportportal/model/BlobNotFoundException.java new file mode 100644 index 0000000..fbdabb4 --- /dev/null +++ b/src/main/java/com/epam/reportportal/model/BlobNotFoundException.java @@ -0,0 +1,35 @@ +package com.epam.reportportal.model; + +/** + * Checked exception for cases when file is not found in blob storage. 
+ */ +public class BlobNotFoundException extends Exception { + + private String fileName; + + public BlobNotFoundException() { + super(); + } + + public BlobNotFoundException(String message) { + super(message); + } + + public BlobNotFoundException(String fileName, Throwable cause) { + super(cause); + this.fileName = fileName; + } + + public BlobNotFoundException(Throwable cause) { + super(cause); + } + + protected BlobNotFoundException(String message, Throwable cause, boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + + public String getFileName() { + return fileName; + } +} diff --git a/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java b/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java index 979c0fc..5bbb789 100644 --- a/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java +++ b/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java @@ -16,6 +16,7 @@ package com.epam.reportportal.storage; +import com.epam.reportportal.model.BlobNotFoundException; import com.epam.reportportal.utils.FeatureFlag; import com.epam.reportportal.utils.FeatureFlagHandler; import java.nio.file.Path; @@ -25,7 +26,7 @@ import org.slf4j.LoggerFactory; /** - * S3 storage service + * S3 storage service. 
*/ public class S3DataStorageService implements DataStorageService { @@ -78,7 +79,7 @@ public void delete(String filePath) throws Exception { blobStore.removeBlob(bucket, objectName); } catch (Exception e) { LOGGER.error("Unable to delete file '{}'", filePath, e); - throw e; + throw new BlobNotFoundException(e); } } From 22b0f669ecd90b09953f7db0f5e4ee330be19902 Mon Sep 17 00:00:00 2001 From: APiankouski <109206864+APiankouski@users.noreply.github.com> Date: Wed, 17 May 2023 18:06:15 +0300 Subject: [PATCH 07/15] Merge master to hotfix/next (#72) * Release 5.8.0 (#71) * EPMRPP-81362 || Fix security vulnerabilities (#58) * EPMRPP-81362 || Update gson version to make able jcloud work (#59) * Merge master to 5.7.5 (#66) * EPMRPP-80865|| Update bom version * [Gradle Release Plugin] - new version commit: '5.7.5'. * EPMRPP-82673-exec-jar promote.yml update (added exec jar) --------- Co-authored-by: miracle8484 <76156909+miracle8484@users.noreply.github.com> Co-authored-by: reportportal.io Co-authored-by: rkukharenka Co-authored-by: Ryhor <125865748+rkukharenka@users.noreply.github.com> * Update version * EPMRPP-83538 || Job service version is missing on Login page * Update version --------- Co-authored-by: miracle8484 <76156909+miracle8484@users.noreply.github.com> Co-authored-by: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Co-authored-by: reportportal.io Co-authored-by: rkukharenka Co-authored-by: Ryhor <125865748+rkukharenka@users.noreply.github.com> Co-authored-by: Andrei Piankouski * [Gradle Release Plugin] - new version commit: '5.8.1'. 
--------- Co-authored-by: miracle8484 <76156909+miracle8484@users.noreply.github.com> Co-authored-by: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Co-authored-by: reportportal.io Co-authored-by: rkukharenka Co-authored-by: Ryhor <125865748+rkukharenka@users.noreply.github.com> Co-authored-by: Andrei Piankouski --- .github/workflows/release.yml | 2 +- gradle.properties | 2 +- src/main/resources/application.properties | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 744523d..3374fca 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ on: env: GH_USER_NAME: github.actor SCRIPTS_VERSION: 5.7.0 - BOM_VERSION: 5.7.4 + BOM_VERSION: 5.7.5 REPOSITORY_URL: 'https://maven.pkg.github.com/' jobs: diff --git a/gradle.properties b/gradle.properties index 81f77b1..223ed93 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -version=5.8.0 +version=5.8.1 description=EPAM Report portal. 
Service jobs dockerServerUrl=unix:///var/run/docker.sock dockerPrepareEnvironment=apk -U -q upgrade && apk --no-cache -q add openjdk11 ca-certificates diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 526fa52..c84bd87 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -7,4 +7,5 @@ info.build.repo=${repo} server.port=8686 management.endpoints.web.exposure.include=info, health management.endpoints.web.base-path=/ -management.endpoint.info.enabled=true \ No newline at end of file +management.endpoint.info.enabled=true +management.info.env.enabled=true \ No newline at end of file From 487b7c03f074669bc59684d81ec228ded52c75a4 Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Thu, 18 May 2023 11:37:14 +0300 Subject: [PATCH 08/15] EPRMPP-83651 || Clean storage job out of memory (#74) * EPMRPP-83651 || Create batching for clean storage job * EPMRPP-83651 || Refactor CleanStorageJob * EPMRPP-83651 || Add check for empty attachment_deletion table * EPMRPP-83651 || Clean attachments list every batch * EPMRPP-83651 || Add default value for feature flags * EPMRPP-83651 || Change logic for CleanStorageJob when using multibucket * EPMRPP-83651 || Fix bug interrupting remove files when bucket is not found * EPMRPP-83651 || Refactor CodeStyle --- .../jobs/clean/CleanStorageJob.java | 58 +++++++++++++------ .../storage/DataStorageService.java | 4 +- .../storage/LocalDataStorageService.java | 29 +++++----- .../storage/S3DataStorageService.java | 57 ++++++++++-------- src/main/resources/application.yml | 3 + 5 files changed, 95 insertions(+), 56 deletions(-) diff --git a/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java b/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java index 2cd58ee..fb61a06 100644 --- a/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java +++ 
b/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java @@ -4,11 +4,13 @@ import com.epam.reportportal.model.BlobNotFoundException; import com.epam.reportportal.storage.DataStorageService; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Base64; +import java.util.List; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import net.javacrumbs.shedlock.spring.annotation.SchedulerLock; import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.util.Strings; import org.springframework.beans.factory.annotation.Value; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.scheduling.annotation.Scheduled; @@ -27,9 +29,13 @@ public class CleanStorageJob extends BaseJob { private static final String SELECT_AND_DELETE_DATA_CHUNK_QUERY = "DELETE FROM attachment_deletion WHERE id IN " + "(SELECT id FROM attachment_deletion ORDER BY id LIMIT ?) RETURNING *"; + + private static final int MAX_BATCH_SIZE = 200000; private final DataStorageService storageService; private final int chunkSize; + private final int batchSize; + /** * Initializes {@link CleanStorageJob}. * @@ -42,6 +48,7 @@ public CleanStorageJob(JdbcTemplate jdbcTemplate, DataStorageService storageServ super(jdbcTemplate); this.chunkSize = chunkSize; this.storageService = storageService; + this.batchSize = chunkSize <= MAX_BATCH_SIZE ? 
chunkSize : MAX_BATCH_SIZE; } /** @@ -54,31 +61,44 @@ public void execute() { logStart(); AtomicInteger counter = new AtomicInteger(0); - jdbcTemplate.query(SELECT_AND_DELETE_DATA_CHUNK_QUERY, rs -> { + int batchNumber = 1; + while (batchNumber * batchSize <= chunkSize) { + List attachments = new ArrayList<>(); + List thumbnails = new ArrayList<>(); + jdbcTemplate.query(SELECT_AND_DELETE_DATA_CHUNK_QUERY, rs -> { + do { + String attachment = rs.getString("file_id"); + String thumbnail = rs.getString("thumbnail_id"); + if (attachment != null) { + attachments.add(attachment); + } + if (thumbnail != null) { + thumbnails.add(thumbnail); + } + } while (rs.next()); + }, batchSize); + + int attachmentsSize = thumbnails.size() + attachments.size(); + if (attachmentsSize == 0) { + break; + } try { - delete(rs.getString("file_id"), rs.getString("thumbnail_id")); - counter.incrementAndGet(); - while (rs.next()) { - delete(rs.getString("file_id"), rs.getString("thumbnail_id")); - counter.incrementAndGet(); - } + storageService.deleteAll( + thumbnails.stream().map(this::decode).collect(Collectors.toList())); + storageService.deleteAll( + attachments.stream().map(this::decode).collect(Collectors.toList())); } catch (BlobNotFoundException e) { - LOGGER.info("File {} is not found when executing clean storage job", e.getFileName()); + LOGGER.info("File is not found when executing clean storage job"); } catch (Exception e) { throw new RuntimeException(ROLLBACK_ERROR_MESSAGE, e); } - }, chunkSize); - - logFinish(counter.get()); - } - private void delete(String fileId, String thumbnailId) throws Exception { - if (Strings.isNotBlank(fileId)) { - storageService.delete(decode(fileId)); - } - if (Strings.isNotBlank(thumbnailId)) { - storageService.delete(decode(thumbnailId)); + counter.addAndGet(attachmentsSize); + LOGGER.info("Iteration {}, deleted {} attachments", batchNumber, attachmentsSize); + batchNumber++; } + + logFinish(counter.get()); } private String decode(String data) { diff 
--git a/src/main/java/com/epam/reportportal/storage/DataStorageService.java b/src/main/java/com/epam/reportportal/storage/DataStorageService.java index d7db8a1..d28fc96 100644 --- a/src/main/java/com/epam/reportportal/storage/DataStorageService.java +++ b/src/main/java/com/epam/reportportal/storage/DataStorageService.java @@ -16,9 +16,11 @@ package com.epam.reportportal.storage; +import java.util.List; + /** * Storage service interface */ public interface DataStorageService { - void delete(String filePath) throws Exception; + void deleteAll(List paths) throws Exception; } \ No newline at end of file diff --git a/src/main/java/com/epam/reportportal/storage/LocalDataStorageService.java b/src/main/java/com/epam/reportportal/storage/LocalDataStorageService.java index 03e45ca..c1f8cad 100644 --- a/src/main/java/com/epam/reportportal/storage/LocalDataStorageService.java +++ b/src/main/java/com/epam/reportportal/storage/LocalDataStorageService.java @@ -16,6 +16,7 @@ package com.epam.reportportal.storage; +import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,21 +29,23 @@ */ public class LocalDataStorageService implements DataStorageService { - private static final Logger LOGGER = LoggerFactory.getLogger(LocalDataStorageService.class); + private static final Logger LOGGER = LoggerFactory.getLogger(LocalDataStorageService.class); - private final String storageRootPath; + private final String storageRootPath; - public LocalDataStorageService(String storageRootPath) { - this.storageRootPath = storageRootPath; - } + public LocalDataStorageService(String storageRootPath) { + this.storageRootPath = storageRootPath; + } - @Override - public void delete(String filePath) throws IOException { - try { - Files.deleteIfExists(Paths.get(storageRootPath, filePath)); - } catch (IOException e) { - LOGGER.error("Unable to delete file '{}'", filePath, e); - throw e; - } + @Override + public void deleteAll(List paths) throws IOException { + for (String path : 
paths) { + try { + Files.deleteIfExists(Paths.get(storageRootPath, path)); + } catch (IOException e) { + LOGGER.error("Unable to delete file '{}'", path, e); + throw e; + } } + } } diff --git a/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java b/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java index 5bbb789..a235465 100644 --- a/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java +++ b/src/main/java/com/epam/reportportal/storage/S3DataStorageService.java @@ -16,14 +16,18 @@ package com.epam.reportportal.storage; -import com.epam.reportportal.model.BlobNotFoundException; import com.epam.reportportal.utils.FeatureFlag; import com.epam.reportportal.utils.FeatureFlagHandler; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.jclouds.blobstore.BlobStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.util.CollectionUtils; /** * S3 storage service. 
@@ -55,35 +59,42 @@ public S3DataStorageService(BlobStore blobStore, String bucketPrefix, String def } @Override - public void delete(String filePath) throws Exception { - Path targetPath = Paths.get(filePath); - int nameCount = targetPath.getNameCount(); - - String bucket; - String objectName; - + public void deleteAll(List paths) throws Exception { + if (CollectionUtils.isEmpty(paths)) { + return; + } if (featureFlagHandler.isEnabled(FeatureFlag.SINGLE_BUCKET)) { - bucket = defaultBucketName; - objectName = filePath; + removeFiles(defaultBucketName, paths); } else { - if (nameCount > 1) { - bucket = bucketPrefix + retrievePath(targetPath, 0, 1); - objectName = retrievePath(targetPath, 1, nameCount); - } else { - bucket = defaultBucketName; - objectName = retrievePath(targetPath, 0, 1); + Map> bucketPathMap = new HashMap<>(); + for (String path : paths) { + Path targetPath = Paths.get(path); + int nameCount = targetPath.getNameCount(); + String bucket = retrievePath(targetPath, 0, 1); + String cutPath = retrievePath(targetPath, 1, nameCount); + if (bucketPathMap.containsKey(bucket)) { + bucketPathMap.get(bucket).add(cutPath); + } else { + List bucketPaths = new ArrayList<>(); + bucketPaths.add(cutPath); + bucketPathMap.put(bucket, bucketPaths); + } + } + for (Map.Entry> bucketPaths : bucketPathMap.entrySet()) { + removeFiles(bucketPrefix + bucketPaths.getKey(), bucketPaths.getValue()); } - } - - try { - blobStore.removeBlob(bucket, objectName); - } catch (Exception e) { - LOGGER.error("Unable to delete file '{}'", filePath, e); - throw new BlobNotFoundException(e); } } private String retrievePath(Path path, int beginIndex, int endIndex) { return String.valueOf(path.subpath(beginIndex, endIndex)); } + + private void removeFiles(String bucketName, List paths) { + try { + blobStore.removeBlobs(bucketName, paths); + } catch (Exception e) { + LOGGER.warn("Exception {} is occurred during deleting file", e.getMessage()); + } + } } diff --git 
a/src/main/resources/application.yml b/src/main/resources/application.yml index 5ee3721..d1f1ad4 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -1,4 +1,6 @@ rp: + feature: + flags: environment: variable: elements-counter: @@ -8,6 +10,7 @@ rp: ## 30 seconds cron: '*/30 * * * * *' chunkSize: 1000 + batchSize: 100 attachment: ## 2 minutes cron: '0 */2 * * * *' From f4ce340af4deb37e53646966e07776772458636b Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Mon, 29 May 2023 10:47:44 +0300 Subject: [PATCH 09/15] EPMRPP-83098 || Update datastore variables naming (#75) --- .../config/DataStorageConfig.java | 22 +++++++++---------- src/main/resources/application.yml | 20 +++++------------ 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/src/main/java/com/epam/reportportal/config/DataStorageConfig.java b/src/main/java/com/epam/reportportal/config/DataStorageConfig.java index 050290c..19cdc53 100644 --- a/src/main/java/com/epam/reportportal/config/DataStorageConfig.java +++ b/src/main/java/com/epam/reportportal/config/DataStorageConfig.java @@ -136,7 +136,7 @@ public String toString() { @Bean @ConditionalOnProperty(name = "datastore.type", havingValue = "filesystem") public DataStorageService localDataStore( - @Value("${datastore.default.path:/data/store}") String storagePath) { + @Value("${datastore.path:/data/store}") String storagePath) { return new LocalDataStorageService(storagePath); } @@ -150,9 +150,9 @@ public DataStorageService localDataStore( */ @Bean @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") - public BlobStore minioBlobStore(@Value("${datastore.minio.accessKey}") String accessKey, - @Value("${datastore.minio.secretKey}") String secretKey, - @Value("${datastore.minio.endpoint}") String endpoint) { + public BlobStore minioBlobStore(@Value("${datastore.accessKey}") String accessKey, + @Value("${datastore.secretKey}") String secretKey, + 
@Value("${datastore.endpoint}") String endpoint) { BlobStoreContext blobStoreContext = ContextBuilder.newBuilder("s3").endpoint(endpoint).credentials(accessKey, secretKey) @@ -173,8 +173,8 @@ public BlobStore minioBlobStore(@Value("${datastore.minio.accessKey}") String ac @Bean @ConditionalOnProperty(name = "datastore.type", havingValue = "minio") public DataStorageService minioDataStore(@Autowired BlobStore blobStore, - @Value("${datastore.minio.bucketPrefix}") String bucketPrefix, - @Value("${datastore.minio.defaultBucketName}") String defaultBucketName, + @Value("${datastore.bucketPrefix}") String bucketPrefix, + @Value("${datastore.defaultBucketName}") String defaultBucketName, FeatureFlagHandler featureFlagHandler) { return new S3DataStorageService(blobStore, bucketPrefix, defaultBucketName, featureFlagHandler); } @@ -189,9 +189,9 @@ public DataStorageService minioDataStore(@Autowired BlobStore blobStore, */ @Bean @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") - public BlobStore blobStore(@Value("${datastore.s3.accessKey}") String accessKey, - @Value("${datastore.s3.secretKey}") String secretKey, - @Value("${datastore.s3.region}") String region) { + public BlobStore blobStore(@Value("${datastore.accessKey}") String accessKey, + @Value("${datastore.secretKey}") String secretKey, + @Value("${datastore.region}") String region) { Iterable modules = ImmutableSet.of(new CustomBucketToRegionModule(region)); BlobStoreContext blobStoreContext = @@ -205,8 +205,8 @@ public BlobStore blobStore(@Value("${datastore.s3.accessKey}") String accessKey, @Primary @ConditionalOnProperty(name = "datastore.type", havingValue = "s3") public DataStorageService s3DataStore(@Autowired BlobStore blobStore, - @Value("${datastore.s3.bucketPrefix}") String bucketPrefix, - @Value("${datastore.s3.defaultBucketName}") String defaultBucketName, + @Value("${datastore.bucketPrefix}") String bucketPrefix, + @Value("${datastore.defaultBucketName}") String defaultBucketName, 
FeatureFlagHandler featureFlagHandler) { return new S3DataStorageService(blobStore, bucketPrefix, defaultBucketName, featureFlagHandler); } diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index d1f1ad4..c6aaff6 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -79,20 +79,12 @@ rp: datastore: path: /data/storage - minio: - endpoint: http://minio:9000 - accessKey: - secretKey: - bucketPrefix: prj- - defaultBucketName: rp-bucket - region: #{null} - # could be one of [filesystem, s3, minio] - s3: - region: us-standard - accessKey: - secretKey: - bucketPrefix: prj- - defaultBucketName: rp-bucket type: minio + endpoint: http://minio:9000 + accessKey: + secretKey: + bucketPrefix: prj- + defaultBucketName: rp-bucket + region: #{null} From 2e9658b266a096e6aed106ae16e10e98b69b7ef5 Mon Sep 17 00:00:00 2001 From: Ivan_Kustau Date: Wed, 31 May 2023 11:00:52 +0300 Subject: [PATCH 10/15] Update gradle scripts version --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index a104ef8..4b4ad80 100644 --- a/build.gradle +++ b/build.gradle @@ -9,7 +9,7 @@ project.ext { releaseMode = project.hasProperty("releaseMode") } -def scriptsUrl = 'https://raw.githubusercontent.com/reportportal/gradle-scripts/' + (releaseMode ? getProperty('scripts.version') : '5.5.0') +def scriptsUrl = 'https://raw.githubusercontent.com/reportportal/gradle-scripts/' + (releaseMode ? 
getProperty('scripts.version') : 'develop') apply from: "$scriptsUrl/build-docker.gradle" apply from: "$scriptsUrl/build-commons.gradle" From b02fc8a197e0f748f72f69af7db2d89fd3404def Mon Sep 17 00:00:00 2001 From: Ivan_Kustau Date: Wed, 31 May 2023 11:08:09 +0300 Subject: [PATCH 11/15] Remove dockerPrepareEnvironment --- gradle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradle.properties b/gradle.properties index 223ed93..eda4b33 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,5 +1,5 @@ version=5.8.1 description=EPAM Report portal. Service jobs dockerServerUrl=unix:///var/run/docker.sock -dockerPrepareEnvironment=apk -U -q upgrade && apk --no-cache -q add openjdk11 ca-certificates +dockerPrepareEnvironment= dockerJavaOpts=-Xmx512m -XX:+UseG1GC -XX:InitiatingHeapOccupancyPercent=70 -Djava.security.egd=file:/dev/./urandom \ No newline at end of file From 692fb0c978734eb6860497888c70248a790947a7 Mon Sep 17 00:00:00 2001 From: Ivan_Kustau Date: Fri, 30 Jun 2023 15:33:51 +0300 Subject: [PATCH 12/15] EPMRPP-84200 || Update release versions --- .github/workflows/release.yml | 2 +- Dockerfile | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3374fca..2607103 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,7 +11,7 @@ on: env: GH_USER_NAME: github.actor - SCRIPTS_VERSION: 5.7.0 + SCRIPTS_VERSION: 5.8.0 BOM_VERSION: 5.7.5 REPOSITORY_URL: 'https://maven.pkg.github.com/' diff --git a/Dockerfile b/Dockerfile index eadc29c..818dc14 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,11 @@ -FROM alpine:latest -LABEL version=5.8.0 description="EPAM Report portal. Service jobs" maintainer="Andrei Varabyeu , Hleb Kanonik " +FROM amazoncorretto:11.0.17 +LABEL version=5.8.1 description="EPAM Report portal. 
Service jobs" maintainer="Andrei Varabyeu , Hleb Kanonik " ARG GH_TOKEN -RUN apk -U -q upgrade && apk --no-cache -q add openjdk11 ca-certificates && \ - echo 'exec java ${JAVA_OPTS} -jar service-jobs-5.8.0-exec.jar' > /start.sh && chmod +x /start.sh && \ - wget --header="Authorization: Bearer ${GH_TOKEN}" -q https://maven.pkg.github.com/reportportal/service-jobs/com/epam/reportportal/service-jobs/5.8.0/service-jobs-5.8.0-exec.jar -ENV JAVA_OPTS="-Xmx512m -XX:+UseG1GC -XX:InitiatingHeapOccupancyPercent=70 -Djava.security.egd=file:/dev/./urandom" +ARG GH_URL=https://__:$GH_TOKEN@maven.pkg.github.com/reportportal/service-jobs/com/epam/reportportal/service-jobs/5.8.1/service-jobs-5.8.1-exec.jar +RUN curl -O -L $GH_URL \ + --output service-jobs-5.8.1-exec.jar && \ + echo 'exec java ${JAVA_OPTS} -jar service-jobs-5.8.1-exec.jar' > /start.sh && chmod +x /start.sh +ENV JAVA_OPTS="" VOLUME ["/tmp"] EXPOSE 8080 -ENTRYPOINT ./start.sh +ENTRYPOINT ./start.sh \ No newline at end of file From 89b03bdc2a05077b203e2a489da91d58f997107f Mon Sep 17 00:00:00 2001 From: Ivan_Kustau Date: Fri, 30 Jun 2023 15:38:25 +0300 Subject: [PATCH 13/15] EPMRPP-84200 || Update gradle scripts version --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 4b4ad80..0ad29c8 100644 --- a/build.gradle +++ b/build.gradle @@ -9,7 +9,7 @@ project.ext { releaseMode = project.hasProperty("releaseMode") } -def scriptsUrl = 'https://raw.githubusercontent.com/reportportal/gradle-scripts/' + (releaseMode ? getProperty('scripts.version') : 'develop') +def scriptsUrl = 'https://raw.githubusercontent.com/reportportal/gradle-scripts/' + (releaseMode ?
getProperty('scripts.version') : 'master') apply from: "$scriptsUrl/build-docker.gradle" apply from: "$scriptsUrl/build-commons.gradle" From eddcff08ebec5afaddec5a7cf4ba045b4f6b5e62 Mon Sep 17 00:00:00 2001 From: Ivan Kustau <86599591+IvanKustau@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:34:26 +0300 Subject: [PATCH 14/15] Clean storage job fix (#77) * Add clear of attachments --- .../com/epam/reportportal/jobs/clean/CleanStorageJob.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java b/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java index fb61a06..aa226dd 100644 --- a/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java +++ b/src/main/java/com/epam/reportportal/jobs/clean/CleanStorageJob.java @@ -62,9 +62,9 @@ public void execute() { AtomicInteger counter = new AtomicInteger(0); int batchNumber = 1; + List attachments = new ArrayList<>(); + List thumbnails = new ArrayList<>(); while (batchNumber * batchSize <= chunkSize) { - List attachments = new ArrayList<>(); - List thumbnails = new ArrayList<>(); jdbcTemplate.query(SELECT_AND_DELETE_DATA_CHUNK_QUERY, rs -> { do { String attachment = rs.getString("file_id"); @@ -87,6 +87,8 @@ public void execute() { thumbnails.stream().map(this::decode).collect(Collectors.toList())); storageService.deleteAll( attachments.stream().map(this::decode).collect(Collectors.toList())); + attachments.clear(); + thumbnails.clear(); } catch (BlobNotFoundException e) { LOGGER.info("File is not found when executing clean storage job"); } catch (Exception e) { From 46b7aa62b55dc253745e729c883e50beccc11847 Mon Sep 17 00:00:00 2001 From: Ivan_Kustau Date: Mon, 10 Jul 2023 14:37:27 +0300 Subject: [PATCH 15/15] Update bom version --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2607103..f8840b0 
100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ on: env: GH_USER_NAME: github.actor SCRIPTS_VERSION: 5.8.0 - BOM_VERSION: 5.7.5 + BOM_VERSION: 5.7.6 REPOSITORY_URL: 'https://maven.pkg.github.com/' jobs: