diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index ca101f3207..d35b840d39 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -26,5 +26,9 @@ jobs: AWS_ACCESS_KEY_ID: localstack AWS_SECRET_ACCESS_KEY: localstack AWS_DEFAULT_REGION: us-east-1 + - name: Run Setup of JobService + working-directory: ./xyz-jobs/xyz-job-service/src/main/bash + run: docker run --rm --entrypoint '' -v ./localSetup.sh:/aws/localSetup.sh --add-host host.docker.internal=host-gateway amazon/aws-cli ./localSetup.sh true - name: Run tests + working-directory: ./ run: mvn verify -DskipTests=false diff --git a/Dockerfile-job-service b/Dockerfile-job-service new file mode 100644 index 0000000000..8df248c4cf --- /dev/null +++ b/Dockerfile-job-service @@ -0,0 +1,20 @@ +FROM openjdk:17-slim + +MAINTAINER Benjamin Rögner "benjamin.roegner@here.com" +MAINTAINER Lucas Ceni "lucas.ceni@here.com" +MAINTAINER Dimitar Goshev "dimitar.goshev@here.com" +MAINTAINER Minikon Nah "minikon.nah@here.com" +MAINTAINER Maximilian Chrzan "maximilian.chrzan@here.com" + +ENV LOG_CONFIG log4j2-console-plain.json +ENV LOG_PATH /var/log/xyz + +#Override the following environment variables to let the service connect to different host names +ENV LOCALSTACK_ENDPOINT http://aws-localstack:4566 +ENV HUB_ENDPOINT http://xyz-hub:8080/hub + +COPY xyz-jobs/xyz-job-service/target/xyz-job-service.jar . +ADD Dockerfile-job-service / + +EXPOSE 7070 +CMD java -jar xyz-job-service.jar diff --git a/docker-compose-dynamodb.yml b/docker-compose-dynamodb.yml index 2e422fc51e..c29ae38398 100644 --- a/docker-compose-dynamodb.yml +++ b/docker-compose-dynamodb.yml @@ -35,6 +35,24 @@ services: - "postgres" - "aws-localstack" command: java -cp xyz-hub-service.jar com.here.xyz.httpconnector.CService + xyz-job-service: + image: "xyz-job" + build: + context: "./" + dockerfile: "Dockerfile-job-service" + container_name: xyz-job-service + ports: + - "7070:7070" + environment: + - HUB_ENDPOINT=http://xyz-hub:8080/hub + - LOCALSTACK_ENDPOINT=http://localstack:4566 + - JOBS_DYNAMODB_TABLE_ARN=arn:aws:dynamodb:dynamodb:000000008000:table/xyz-jobs-local + depends_on: + - "xyz-hub" + - "dynamodb" + - "postgres" + - "aws-localstack" + command: java -cp xyz-job-service.jar com.here.xyz.jobs.service.JobService postgres: image: "xyz-postgres" build: diff --git a/xyz-connectors/src/main/java/com/here/xyz/connectors/AbstractConnectorHandler.java b/xyz-connectors/src/main/java/com/here/xyz/connectors/AbstractConnectorHandler.java index 8490297955..12ae492399 100644 --- a/xyz-connectors/src/main/java/com/here/xyz/connectors/AbstractConnectorHandler.java +++ b/xyz-connectors/src/main/java/com/here/xyz/connectors/AbstractConnectorHandler.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,6 +19,7 @@ package com.here.xyz.connectors; +import static com.here.xyz.responses.XyzError.EXCEPTION; import static com.here.xyz.responses.XyzError.FORBIDDEN; import com.amazonaws.services.lambda.AWSLambda; @@ -34,8 +35,8 @@ import com.here.xyz.XyzSerializable; import com.here.xyz.connectors.decryptors.EventDecryptor; import com.here.xyz.connectors.decryptors.EventDecryptor.Decryptors; -import com.here.xyz.connectors.runtime.ConnectorRuntime; -import com.here.xyz.connectors.runtime.LambdaConnectorRuntime; +import com.here.xyz.util.runtime.FunctionRuntime; +import com.here.xyz.util.runtime.LambdaFunctionRuntime; import com.here.xyz.events.Event; import com.here.xyz.events.EventNotification; import com.here.xyz.events.HealthCheckEvent; @@ -63,10 +64,6 @@ * A default implementation of a request handler that can be reused. It supports out of the box caching via e-tag. */ public abstract class AbstractConnectorHandler implements RequestStreamHandler { - - /** - * Logger - */ private static final Logger logger = LogManager.getLogger(); /** @@ -216,7 +213,7 @@ public void handleRequest(InputStream input, OutputStream output, Context contex String connectorId = null; this.streamId = streamId != null ? streamId : event.getStreamId(); - new LambdaConnectorRuntime(context, this.streamId); + new LambdaFunctionRuntime(context, this.streamId); if (event.getConnectorParams() != null && event.getConnectorParams().get("connectorId") != null) connectorId = (String) event.getConnectorParams().get("connectorId"); @@ -242,7 +239,7 @@ public void handleRequest(InputStream input, OutputStream output, Context contex logger.error("{} Unexpected exception occurred:", traceItem, e); dataOut = new ErrorResponse() .withStreamId(this.streamId) - .withError(XyzError.EXCEPTION) + .withError(EXCEPTION) .withErrorMessage("Unexpected exception occurred."); } catch (OutOfMemoryError e) { @@ -311,7 +308,7 @@ private void writeDataOut(OutputStream output, Typed dataOut, String ifNoneMatch .toByteArray(); } - final boolean runningLocally = ConnectorRuntime.getInstance().isRunningLocally(); + final boolean runningLocally = FunctionRuntime.getInstance().isRunningLocally(); if (dataOut instanceof BinaryResponse) { //NOTE: BinaryResponses contain an ETag automatically, nothing to calculate here String etag = ((BinaryResponse) dataOut).getEtag(); @@ -372,8 +369,8 @@ private static void checkEventTypeAllowed(Event event) throws ErrorResponseExcep * These type of events are sent in regular intervals to the lambda handler and could be used to keep the handler's container active and * the connection to the database open. 
*/ - protected XyzResponse processHealthCheckEvent(HealthCheckEvent event) { - if (event.getWarmupCount() > 0 && !ConnectorRuntime.getInstance().isRunningLocally()) { + protected HealthStatus processHealthCheckEvent(HealthCheckEvent event) throws Exception { + if (event.getWarmupCount() > 0 && !FunctionRuntime.getInstance().isRunningLocally()) { int warmupCount = event.getWarmupCount(); event.setWarmupCount(0); byte[] newEvent = event.toByteArray(); @@ -383,7 +380,7 @@ protected XyzResponse processHealthCheckEvent(HealthCheckEvent event) { if (lambdaClient == null) lambdaClient = AWSLambdaClientBuilder.defaultClient(); threads.add(new Thread(() -> lambdaClient.invoke(new InvokeRequest() - .withFunctionName(((LambdaConnectorRuntime) ConnectorRuntime.getInstance()).getInvokedFunctionArn()) + .withFunctionName(((LambdaFunctionRuntime) FunctionRuntime.getInstance()).getInvokedFunctionArn()) .withPayload(ByteBuffer.wrap(newEvent))))); } threads.forEach(t -> t.start()); @@ -395,10 +392,7 @@ protected XyzResponse processHealthCheckEvent(HealthCheckEvent event) { Thread.sleep((event.getMinResponseTime() + start) - System.currentTimeMillis()); } catch (InterruptedException e) { - return new ErrorResponse() - .withErrorMessage(e.getMessage()) - .withStreamId(streamId) - .withError(XyzError.EXCEPTION); + throw new ErrorResponseException(EXCEPTION, e.getMessage()); } } return new HealthStatus(); diff --git a/xyz-connectors/src/main/java/com/here/xyz/connectors/StorageConnector.java b/xyz-connectors/src/main/java/com/here/xyz/connectors/StorageConnector.java index 3c20eb16ed..374daeddd1 100644 --- a/xyz-connectors/src/main/java/com/here/xyz/connectors/StorageConnector.java +++ b/xyz-connectors/src/main/java/com/here/xyz/connectors/StorageConnector.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,6 +19,12 @@ package com.here.xyz.connectors; +import static com.here.xyz.events.GetFeaturesByTileEvent.ResponseType.MVT; +import static com.here.xyz.events.GetFeaturesByTileEvent.ResponseType.MVT_FLATTENED; +import static com.here.xyz.responses.XyzError.EXCEPTION; +import static com.here.xyz.responses.XyzError.NOT_IMPLEMENTED; +import static java.lang.Boolean.TRUE; + import com.here.xyz.events.DeleteChangesetsEvent; import com.here.xyz.events.Event; import com.here.xyz.events.GetChangesetStatisticsEvent; @@ -37,142 +43,186 @@ import com.here.xyz.events.ModifySubscriptionEvent; import com.here.xyz.events.OneTimeActionEvent; import com.here.xyz.events.SearchForFeaturesEvent; +import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.responses.BinaryResponse; +import com.here.xyz.responses.ChangesetsStatisticsResponse; import com.here.xyz.responses.ErrorResponse; +import com.here.xyz.responses.StatisticsResponse; +import com.here.xyz.responses.StorageStatistics; import com.here.xyz.responses.SuccessResponse; -import com.here.xyz.responses.XyzError; import com.here.xyz.responses.XyzResponse; +import com.here.xyz.responses.changesets.ChangesetCollection; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; public abstract class StorageConnector extends AbstractConnectorHandler { + private static final Logger logger = LogManager.getLogger(); @Override public XyzResponse processEvent(Event event) throws Exception { if (event == null) { return new ErrorResponse() .withStreamId(streamId) - .withError(XyzError.EXCEPTION) + .withError(EXCEPTION) .withErrorMessage("Cannot parse the event or the the received event is empty."); } return _processEvent(event); } private XyzResponse _processEvent(Event event) throws Exception { - if (event instanceof ModifySpaceEvent) - return processModifySpaceEvent((ModifySpaceEvent) event); - if (event instanceof ModifySubscriptionEvent) - return processModifySubscriptionEvent((ModifySubscriptionEvent) event); - if (event instanceof ModifyFeaturesEvent) - return processModifyFeaturesEvent((ModifyFeaturesEvent) event); - if (event instanceof GetFeaturesByGeometryEvent) - return processGetFeaturesByGeometryEvent((GetFeaturesByGeometryEvent) event); - if (event instanceof GetFeaturesByTileEvent) - return processGetFeaturesByTileEvent((GetFeaturesByTileEvent) event); - if (event instanceof GetFeaturesByBBoxEvent) - return processGetFeaturesByBBoxEvent((GetFeaturesByBBoxEvent) event); - if (event instanceof IterateFeaturesEvent) - return processIterateFeaturesEvent((IterateFeaturesEvent) event); - if (event instanceof IterateChangesetsEvent) - return processIterateChangesetsEvent((IterateChangesetsEvent) event); - if (event instanceof GetChangesetStatisticsEvent) - return processGetChangesetsStatisticsEvent((GetChangesetStatisticsEvent) event); - if (event instanceof SearchForFeaturesEvent) - return processSearchForFeaturesEvent((SearchForFeaturesEvent) event); - if (event instanceof GetStatisticsEvent) - return processGetStatistics((GetStatisticsEvent) event); - if (event instanceof HealthCheckEvent) - return processHealthCheckEvent((HealthCheckEvent) event); - if (event instanceof GetFeaturesByIdEvent) - return processGetFeaturesByIdEvent((GetFeaturesByIdEvent) event); - if (event instanceof LoadFeaturesEvent) - return processLoadFeaturesEvent((LoadFeaturesEvent) event); - if (event instanceof GetStorageStatisticsEvent) - return processGetStorageStatisticsEvent((GetStorageStatisticsEvent) event); - if 
(event instanceof DeleteChangesetsEvent) - return processDeleteChangesetsEvent((DeleteChangesetsEvent) event); - if (event instanceof OneTimeActionEvent) - return processOneTimeActionEvent((OneTimeActionEvent) event); - - return new ErrorResponse() - .withStreamId(streamId) - .withError(XyzError.NOT_IMPLEMENTED) - .withErrorMessage("Unknown event type '" + event.getClass().getSimpleName() + "'"); + logger.info("{} Received " + event.getClass().getSimpleName(), traceItem); + try { + if (event instanceof ModifySpaceEvent) + return processModifySpaceEvent((ModifySpaceEvent) event); + if (event instanceof ModifySubscriptionEvent) + return processModifySubscriptionEvent((ModifySubscriptionEvent) event); + if (event instanceof ModifyFeaturesEvent) + return processModifyFeaturesEvent((ModifyFeaturesEvent) event); + if (event instanceof GetFeaturesByGeometryEvent) + return processGetFeaturesByGeometryEvent((GetFeaturesByGeometryEvent) event); + if (event instanceof GetFeaturesByTileEvent tileEvent) { + if (tileEvent.getResponseType() == MVT || tileEvent.getResponseType() == MVT_FLATTENED) { + try { + return processBinaryGetFeaturesByTileEvent(tileEvent); + } + catch (UnsupportedOperationException e) { + //Fall back to GeoJSON, the service will perform the transformation into MVT instead + return processGetFeaturesByTileEvent(tileEvent); + } + } + return processGetFeaturesByTileEvent(tileEvent); + } + if (event instanceof GetFeaturesByBBoxEvent) + return processGetFeaturesByBBoxEvent((GetFeaturesByBBoxEvent) event); + if (event instanceof IterateFeaturesEvent) + return processIterateFeaturesEvent((IterateFeaturesEvent) event); + if (event instanceof IterateChangesetsEvent) + return processIterateChangesetsEvent((IterateChangesetsEvent) event); + if (event instanceof GetChangesetStatisticsEvent) + return processGetChangesetsStatisticsEvent((GetChangesetStatisticsEvent) event); + if (event instanceof SearchForFeaturesEvent) + return processSearchForFeaturesEvent((SearchForFeaturesEvent) event); + if (event instanceof GetStatisticsEvent) + return processGetStatistics((GetStatisticsEvent) event); + if (event instanceof HealthCheckEvent) + return processHealthCheckEvent((HealthCheckEvent) event); + if (event instanceof GetFeaturesByIdEvent) + return processGetFeaturesByIdEvent((GetFeaturesByIdEvent) event); + if (event instanceof LoadFeaturesEvent) + return processLoadFeaturesEvent((LoadFeaturesEvent) event); + if (event instanceof GetStorageStatisticsEvent) + return processGetStorageStatisticsEvent((GetStorageStatisticsEvent) event); + if (event instanceof DeleteChangesetsEvent) + return processDeleteChangesetsEvent((DeleteChangesetsEvent) event); + if (event instanceof OneTimeActionEvent) + return processOneTimeActionEvent((OneTimeActionEvent) event); + + return new ErrorResponse() + .withStreamId(streamId) + .withError(NOT_IMPLEMENTED) + .withErrorMessage("Unknown event type: " + event.getClass().getSimpleName()); + } + catch (Exception e) { + handleProcessingException(e, event); + return new ErrorResponse() + .withStreamId(streamId) + .withError(EXCEPTION) + .withErrorMessage("Unhandled exception: " + e.getMessage()); + } + finally { + logger.info("{} Finished " + event.getClass().getSimpleName(), traceItem); + } + } + + protected static boolean mvtSupported(Event event) { + return event.getConnectorParams() != null && event.getConnectorParams().get("mvtSupport") == TRUE; } /** * Processes a GetStatistics event. 
*/ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processGetStatistics(GetStatisticsEvent event) throws Exception; + protected abstract StatisticsResponse processGetStatistics(GetStatisticsEvent event) throws Exception; /** * Processes a GetFeaturesById event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception; + protected abstract FeatureCollection processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception; /** * Processes a GetFeaturesByGeometryEvent event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception; + protected abstract FeatureCollection processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception; /** * Processes a GetFeaturesByBBox event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception; + protected abstract FeatureCollection processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception; /** * Processes a GetFeaturesByTile event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception; + protected abstract FeatureCollection processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception; + + /** + * Processes a binary GetFeaturesByTile event. + */ + protected BinaryResponse processBinaryGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { + throw new UnsupportedOperationException(event.getClass().getSimpleName() + ": No binary support was implemented."); + } /** * Processes a IterateFeatures event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception; + protected abstract FeatureCollection processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception; /** * Processes a SearchForFeatures event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception; + protected abstract FeatureCollection processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception; /** * Processes a LoadFeaturesEvent event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception; + protected abstract FeatureCollection processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception; /** * Processes a ModifyFeaturesEvent event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception; + protected abstract FeatureCollection processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception; /** * Processes a DeleteSpaceEvent event. */ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception; + protected abstract SuccessResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception; /** * Processes a ModifySubscriptionEvent event. 
*/ @SuppressWarnings("WeakerAccess") - protected abstract XyzResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception; + protected abstract SuccessResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception; - protected abstract XyzResponse processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception; + protected abstract StorageStatistics processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception; protected XyzResponse processOneTimeActionEvent(OneTimeActionEvent event) throws Exception { //Default implementation does nothing but may be overridden return new SuccessResponse(); } - protected abstract XyzResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception; + protected abstract SuccessResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception; + + protected abstract ChangesetCollection processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception; - protected abstract XyzResponse processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception; + protected abstract ChangesetsStatisticsResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception; - protected abstract XyzResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception; + protected abstract void handleProcessingException(Exception exception, Event event) throws Exception; } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java index 9d8f8b6424..b4a862e409 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCExporter.java @@ -27,6 +27,7 @@ import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.PARTITIONED_JSON_WKB; import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.PARTITIONID_FC_B64; import static com.here.xyz.httpconnector.util.jobs.Job.CSVFormat.TILEID_FC_B64; +import static com.here.xyz.models.hub.Ref.HEAD; import com.here.xyz.connectors.ErrorResponseException; import com.here.xyz.events.ContextAwareEvent; @@ -37,20 +38,17 @@ import com.here.xyz.httpconnector.config.query.ExportSpace; import com.here.xyz.httpconnector.config.query.ExportSpaceByGeometry; import com.here.xyz.httpconnector.config.query.ExportSpaceByProperties; -import com.here.xyz.httpconnector.rest.HApiParam; import com.here.xyz.httpconnector.task.JdbcBasedHandler; import com.here.xyz.httpconnector.util.jobs.Export; import com.here.xyz.httpconnector.util.jobs.Export.ExportStatistic; import com.here.xyz.httpconnector.util.jobs.Job.CSVFormat; import com.here.xyz.httpconnector.util.web.LegacyHubWebClient; import com.here.xyz.hub.connectors.models.Connector; -import com.here.xyz.hub.rest.ApiParam; import com.here.xyz.jobs.datasets.DatasetDescription.Space; import com.here.xyz.jobs.datasets.filters.Filters; import com.here.xyz.jobs.datasets.filters.SpatialFilter; import com.here.xyz.models.geojson.coordinates.WKTHelper; import com.here.xyz.models.hub.Ref; -import com.here.xyz.psql.query.SearchForFeatures; import com.here.xyz.util.Hasher; import com.here.xyz.util.db.JdbcClient; import com.here.xyz.util.db.SQLQuery; @@ -163,12 +161,12 @@ private static SQLQuery buildCopyContentQuery(JdbcClient client, Export job, boo ////// get filters from source space 
if( job.getSource() != null && job.getSource() instanceof Space ) { Filters f = ((Space) job.getSource()).getFilters(); - propertyFilter = ( f == null ? null : f.getPropertyFilter() ); + propertyFilter = ( f == null ? null : f.getPropertyFilterAsString() ); spatialFilter = ( f == null ? null : f.getSpatialFilter() ); } ////// if filters not provided by source space the get filter from job (legacy behaviour) if( propertyFilter == null ) - propertyFilter = (job.getFilters() == null ? null : job.getFilters().getPropertyFilter()); + propertyFilter = (job.getFilters() == null ? null : job.getFilters().getPropertyFilterAsString()); if( spatialFilter == null ) spatialFilter = (job.getFilters() == null ? null : job.getFilters().getSpatialFilter()); @@ -183,10 +181,10 @@ private static SQLQuery buildCopyContentQuery(JdbcClient client, Export job, boo if( event.getParams() != null && event.getParams().get("versionsToKeep") != null ) event.setVersionsToKeep((int) event.getParams().get("versionsToKeep") ); // -> forcing "...AND next_version = maxBigInt..." in query - event.setRef( job.getTargetVersion() == null ? new Ref("HEAD") : new Ref(job.getTargetVersion()) ); + event.setRef( job.getTargetVersion() == null ? new Ref(HEAD) : new Ref(job.getTargetVersion()) ); if (propertyFilter != null) { - PropertiesQuery propertyQueryLists = HApiParam.Query.parsePropertiesQuery(propertyFilter, "", false); + PropertiesQuery propertyQueryLists = PropertiesQuery.fromString(propertyFilter, "", false); event.setPropertiesQuery(propertyQueryLists); } @@ -198,7 +196,7 @@ private static SQLQuery buildCopyContentQuery(JdbcClient client, Export job, boo try { - return ((ExportSpace) getQueryRunner(client, spatialFilter, event)) + return getQueryRunner(client, spatialFilter, event) //TODO: Why not selecting the feature id / geo here? //FIXME: Do not select operation / author as part of the "property-selection"-fragment .withSelectionOverride(new SQLQuery("jsondata, operation, author")) @@ -271,13 +269,13 @@ private boolean isIncrementalExportNonComposite( CSVFormat csvFormat, String tar } public Future executeExport(Export job, String s3Bucket, String s3Path, String s3Region) { - logger.info("job[{}] Execute Export-legacy csvFormat({}) ParamCompositeMode({}) PartitionKey({})", job.getId(), job.getCsvFormat(), job.readParamCompositeMode(), job.getPartitionKey() ); - + logger.info("job[{}] Execute Export-legacy csvFormat({}) ParamCompositeMode({}) PartitionKey({})", job.getId(), job.getCsvFormat(), job.readParamCompositeMode(), job.getPartitionKey() ); + return getClient(job.getTargetConnector()) .compose(client -> { String schema = getDbSettings(job.getTargetConnector()).getSchema(); try { - String propertyFilter = (job.getFilters() == null ? null : job.getFilters().getPropertyFilter()); + String propertyFilter = (job.getFilters() == null ? null : job.getFilters().getPropertyFilterAsString()); SpatialFilter spatialFilter = (job.getFilters() == null ? null : job.getFilters().getSpatialFilter()); SQLQuery exportQuery; @@ -331,7 +329,7 @@ public Future executeExport(Export job, String s3Bucket, String Is used for incremental exports (tiles) - here we have to export modified tiles. 
Those tiles we need to calculate separately */ - boolean isIncrementalExport = job.isIncrementalMode() + boolean isIncrementalExport = job.isIncrementalMode() || isIncrementalExportNonComposite(job.getCsvFormat(), job.getTargetVersion(), compositeCalculation) ; final SQLQuery qkQuery = ( compositeCalculation || isIncrementalExport ) @@ -490,7 +488,7 @@ public SQLQuery buildS3ExportQuery(JdbcClient client, Export j, String schema, String s3Bucket, String s3Path, String s3FilePrefix, String s3Region, boolean isForCompositeContentDetection, SQLQuery customWhereCondition) throws SQLException { - String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilter()); + String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilterAsString()); SpatialFilter spatialFilter= (j.getFilters() == null ? null : j.getFilters().getSpatialFilter()); s3Path = s3Path+ "/" +(s3FilePrefix == null ? "" : s3FilePrefix)+"export"; @@ -522,7 +520,7 @@ public SQLQuery buildS3ExportQuery(JdbcClient client, Export j, String schema, private SQLQuery buildPartIdVMLExportQuery(JdbcClient client, Export j, String schema, String s3Bucket, String s3Path, String s3FilePrefix, String s3Region, boolean isForCompositeContentDetection, SQLQuery customWhereCondition) throws SQLException { //Generic partition - String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilter()); + String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilterAsString()); SpatialFilter spatialFilter= (j.getFilters() == null ? null : j.getFilters().getSpatialFilter()); s3Path = s3Path+ "/" +(s3FilePrefix == null ? "" : s3FilePrefix)+"export.csv"; @@ -548,7 +546,7 @@ private SQLQuery buildPartIdVMLExportQuery(JdbcClient client, Export j, String s private SQLQuery buildVMLExportQuery(JdbcClient client, Export j, String schema, String s3Bucket, String s3Path, String s3Region, String parentQk, SQLQuery qkTileQry) throws SQLException { //Tiled export - String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilter()); + String propertyFilter = (j.getFilters() == null ? null : j.getFilters().getPropertyFilterAsString()); SpatialFilter spatialFilter= (j.getFilters() == null ? null : j.getFilters().getSpatialFilter()); int maxTilesPerFile = j.getMaxTilesPerFile() == 0 ? 
4096 : j.getMaxTilesPerFile(); @@ -558,7 +556,7 @@ private SQLQuery buildVMLExportQuery(JdbcClient client, Export j, String schema, if (targetVersion != null) { Ref ref = new Ref(targetVersion); if( ref.isRange() ) - targetVersion = "" + ref.getToVersion(); + targetVersion = "" + ref.getEndVersion(); } /* incremental */ @@ -616,7 +614,7 @@ private SQLQuery generateFilteredExportQueryForCompositeTileCalculation(JdbcClie } private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, String spaceId, String propertyFilter, - SpatialFilter spatialFilter, String targetVersion, Map params, CSVFormat csvFormat, SQLQuery customWhereCondition, + SpatialFilter spatialFilter, String targetVersion, Map params, CSVFormat csvFormat, SQLQuery customWhereCondition, boolean isForCompositeContentDetection, String partitionKey, Boolean omitOnNull, boolean isIncrementalExport ) throws SQLException { @@ -653,7 +651,7 @@ private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, S { extStashed = ext; params.remove("extends"); // needs to be removed and restored later on s.'DS-587' // except in case of L2 extends - } + } } context = ContextAwareEvent.SpaceContext.DEFAULT; } @@ -664,7 +662,7 @@ private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, S event.setRef(new Ref(targetVersion)); if (propertyFilter != null) { - PropertiesQuery propertyQueryLists = HApiParam.Query.parsePropertiesQuery(propertyFilter, "", false); + PropertiesQuery propertyQueryLists = PropertiesQuery.fromString(propertyFilter, "", false); event.setPropertiesQuery(propertyQueryLists); } @@ -687,7 +685,7 @@ private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, S contentQueryByPropertyValue = null; try { - final ExportSpace queryRunner = (ExportSpace) getQueryRunner(client, spatialFilter, event); + final ExportSpace queryRunner = getQueryRunner(client, spatialFilter, event); if (customWhereCondition != null && (csvFormat != PARTITIONID_FC_B64 || partitionByFeatureId)) queryRunner.withCustomWhereClause(customWhereCondition); @@ -698,7 +696,7 @@ private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, S if (partitionByPropertyValue && isForCompositeContentDetection) { event.setContext(ctxStashed); - contentQueryByPropertyValue = ((ExportSpace) getQueryRunner(client, spatialFilter, event)) + contentQueryByPropertyValue = getQueryRunner(client, spatialFilter, event) .withGeoOverride(buildGeoFragment(spatialFilter)) .buildQuery(event); } @@ -747,7 +745,7 @@ private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, S if( partitionByPropertyValue ) { - String converted = ApiParam.getConvertedKey(partitionKey); + String converted = PropertiesQuery.getConvertedKey(partitionKey); partitionKey = String.join("'->'",(converted != null ? 
converted : partitionKey).split("\\.")); //TODO: Simplify / structure the following query blob partQry = @@ -801,7 +799,7 @@ private SQLQuery generateFilteredExportQuery(JdbcClient client, String schema, S private static SQLQuery buildGeoFragment(SpatialFilter spatialFilter) { if (spatialFilter != null && spatialFilter.isClipped()) { - if( spatialFilter.getRadius() != 0 ) + if( spatialFilter.getRadius() != 0 ) return new SQLQuery("ST_Intersection(ST_MakeValid(geo), ST_Buffer(st_force3d(ST_GeomFromText(#{wktGeometry}))::geography, #{radius})::geometry) as geo") .withNamedParameter("wktGeometry", WKTHelper.geometryToWKT2d(spatialFilter.getGeometry())) .withNamedParameter("radius", spatialFilter.getRadius()); @@ -813,9 +811,9 @@ private static SQLQuery buildGeoFragment(SpatialFilter spatialFilter) { return new SQLQuery("geo"); } - private static SearchForFeatures getQueryRunner(JdbcClient client, SpatialFilter spatialFilter, GetFeaturesByGeometryEvent event) + private static ExportSpace getQueryRunner(JdbcClient client, SpatialFilter spatialFilter, GetFeaturesByGeometryEvent event) throws SQLException, ErrorResponseException { - SearchForFeatures queryRunner; + ExportSpace queryRunner; if (spatialFilter == null) queryRunner = new ExportSpaceByProperties(event); else diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCMaintainer.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCMaintainer.java index 5076e62e39..31b5c7839d 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCMaintainer.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/JDBCMaintainer.java @@ -34,9 +34,9 @@ import com.here.xyz.responses.maintenance.SpaceStatus; import com.here.xyz.util.Hasher; import com.here.xyz.util.db.ConnectorParameters; -import com.here.xyz.util.db.DatabaseSettings; import com.here.xyz.util.db.JdbcClient; import com.here.xyz.util.db.SQLQuery; +import com.here.xyz.util.db.datasource.DatabaseSettings; import com.here.xyz.util.service.Core; import io.vertx.core.Future; import java.io.IOException; diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpace.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpace.java index 3b72487fa7..a362dc2099 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpace.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpace.java @@ -19,15 +19,25 @@ package com.here.xyz.httpconnector.config.query; +import static com.here.xyz.psql.query.GetFeatures.MAX_BIGINT; +import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.SCHEMA; +import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.TABLE; + import com.here.xyz.connectors.ErrorResponseException; +import com.here.xyz.events.ContextAwareEvent; import com.here.xyz.events.SearchForFeaturesEvent; +import com.here.xyz.events.SelectiveEvent; +import com.here.xyz.models.hub.Ref; import com.here.xyz.util.db.SQLQuery; +import com.here.xyz.util.db.datasource.DataSourceProvider; import java.sql.SQLException; //TODO: Remove that hack after refactoring is complete public interface ExportSpace { SQLQuery buildQuery(E event) throws SQLException, ErrorResponseException; + void setDataSourceProvider(DataSourceProvider dataSourceProvider); + ExportSpace withSelectionOverride(SQLQuery selectionOverride); ExportSpace withGeoOverride(SQLQuery geoOverride); @@ -48,4 +58,77 @@ default SQLQuery 
patchWhereClause(SQLQuery filterWhereClause, SQLQuery customWhe .withQueryFragment("customWhereClause", customWhereClause); return customizedWhereClause; } + + default boolean isVersionRange(E event) { + return event.getRef().isRange(); + } + + default SQLQuery buildVersionComparisonTileCalculation(SelectiveEvent event) { + Ref ref = event.getRef(); + + if( ref == null || !ref.isRange() ) + return new SQLQuery(""); + + return new SQLQuery( // e.g. all features that where visible either in version "fromVersion" or "toVersion" and have changed between fromVersion and toVersion + """ + AND ( ( version <= #{toVersion} and next_version > #{toVersion} ) + OR ( version <= #{fromVersion} and next_version > #{fromVersion} ) + ) + AND id in ( select distinct id FROM ${schema}.${table} WHERE version > #{fromVersion} and version <= #{toVersion} ) + """ + ).withNamedParameter("fromVersion", ref.getStartVersion()) + .withNamedParameter("toVersion", ref.getEndVersion()); + } + + SQLQuery buildSelectClause(E event, int dataset); + SQLQuery buildFiltersFragment(E event, boolean isExtension, SQLQuery filterWhereClause, int dataset); + SQLQuery buildFilterWhereClause(E event); + String getSchema(); + String getDefaultTable(E event); + String buildOuterOrderByFragment(ContextAwareEvent event); + SQLQuery buildLimitFragment(E event); + + default SQLQuery buildVersionComparisonForRange(SelectiveEvent event) { + Ref ref = event.getRef(); + if (event.getVersionsToKeep() == 1 || ref.isAllVersions() || ref.isHead()) + return new SQLQuery(""); + + return new SQLQuery("AND version > #{fromVersion} AND version <= #{toVersion}") + .withNamedParameter("fromVersion", ref.getStartVersion()) + .withNamedParameter("toVersion", ref.getEndVersion()); + } + + default SQLQuery buildNextVersionFragmentForRange(Ref ref, boolean historyEnabled, String versionParamName) { + if (!historyEnabled || ref.isAllVersions()) + return new SQLQuery(""); + + boolean endVersionIsHead = ref.getEndVersion() == MAX_BIGINT; + //TODO: review semantic of "NextVersionFragment" in case of ref.isRange + return new SQLQuery("AND next_version ${{op}} #{" + versionParamName + "}") + .withQueryFragment("op", endVersionIsHead ? "=" : ">") + .withNamedParameter(versionParamName, endVersionIsHead ? MAX_BIGINT : ref.getEndVersion()); + } + + default SQLQuery buildVersionCheckFragment(E event) { + return new SQLQuery("${{versionComparison}} ${{nextVersion}} ${{minVersion}}") + .withQueryFragment("versionComparison", buildVersionComparisonTileCalculation(event)) + .withQueryFragment("nextVersion", new SQLQuery("")) // remove standard fragment s. 
buildVersionComparisonTileCalculation + .withQueryFragment("minVersion", new SQLQuery("")); // remove standard fragment + } + + default SQLQuery buildMainIncrementalQuery(E event) { + return new SQLQuery( + """ + SELECT ${{selectClause}} FROM ${schema}.${table} + WHERE ${{filters}} ${{versionCheck}} ${{outerOrderBy}} ${{limit}} + """ + ) + .withQueryFragment("selectClause", buildSelectClause(event, 0)) + .withQueryFragment("filters", buildFiltersFragment(event, false, buildFilterWhereClause(event), 0)) + .withVariable(SCHEMA, getSchema()) + .withVariable(TABLE, getDefaultTable(event)) + .withQueryFragment("versionCheck", buildVersionCheckFragment(event)) + .withQueryFragment("outerOrderBy", buildOuterOrderByFragment(event)) + .withQueryFragment("limit", buildLimitFragment(event)); + } } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByGeometry.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByGeometry.java index 3aa8276ec4..4cd0915070 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByGeometry.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByGeometry.java @@ -19,8 +19,13 @@ package com.here.xyz.httpconnector.config.query; +import static com.here.xyz.events.ContextAwareEvent.SpaceContext.COMPOSITE_EXTENSION; + import com.here.xyz.connectors.ErrorResponseException; +import com.here.xyz.events.ContextAwareEvent; import com.here.xyz.events.GetFeaturesByGeometryEvent; +import com.here.xyz.events.SelectiveEvent; +import com.here.xyz.models.hub.Ref; import com.here.xyz.psql.query.GetFeaturesByGeometry; import com.here.xyz.util.db.SQLQuery; import java.sql.SQLException; @@ -36,11 +41,13 @@ public ExportSpaceByGeometry(GetFeaturesByGeometryEvent event) throws SQLExcepti @Override public SQLQuery buildQuery(GetFeaturesByGeometryEvent event) throws SQLException, ErrorResponseException { + if (event.getContext() == COMPOSITE_EXTENSION && isVersionRange(event)) + return buildMainIncrementalQuery(event); return super.buildQuery(event); } @Override - protected SQLQuery buildSelectClause(GetFeaturesByGeometryEvent event, int dataset) { + public SQLQuery buildSelectClause(GetFeaturesByGeometryEvent event, int dataset) { return patchSelectClause(super.buildSelectClause(event, dataset), selectionOverride); } @@ -52,12 +59,17 @@ protected SQLQuery buildGeoFragment(GetFeaturesByGeometryEvent event) { } @Override - protected SQLQuery buildFilterWhereClause(GetFeaturesByGeometryEvent event) { + public SQLQuery buildFilterWhereClause(GetFeaturesByGeometryEvent event) { return patchWhereClause(super.buildFilterWhereClause(event), customWhereClause); } @Override - protected SQLQuery buildLimitFragment(GetFeaturesByGeometryEvent event) { + public SQLQuery buildFiltersFragment(GetFeaturesByGeometryEvent event, boolean isExtension, SQLQuery filterWhereClause, int dataset) { + return super.buildFiltersFragment(event, isExtension, filterWhereClause, dataset); + } + + @Override + public SQLQuery buildLimitFragment(GetFeaturesByGeometryEvent event) { return new SQLQuery(""); } @@ -78,4 +90,33 @@ public ExportSpace withCustomWhereClause(SQLQuery cu this.customWhereClause = customWhereClause; return this; } + + @Override + public String getSchema() { + return super.getSchema(); + } + + @Override + public String getDefaultTable(GetFeaturesByGeometryEvent event) { + return super.getDefaultTable(event); + } + + @Override + public String 
buildOuterOrderByFragment(ContextAwareEvent event) { + return super.buildOuterOrderByFragment(event); + } + + @Override + public SQLQuery buildVersionComparison(SelectiveEvent event) { + if (event.getRef().isRange()) + return buildVersionComparisonForRange(event); + return super.buildVersionComparison(event); + } + + @Override + public SQLQuery buildNextVersionFragment(Ref ref, boolean historyEnabled, String versionParamName) { + if (ref.isRange()) + return buildNextVersionFragmentForRange(ref, historyEnabled, versionParamName); + return super.buildNextVersionFragment(ref, historyEnabled, versionParamName); + } } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByProperties.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByProperties.java index 5bc29e2548..650a773f78 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByProperties.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/config/query/ExportSpaceByProperties.java @@ -19,10 +19,15 @@ package com.here.xyz.httpconnector.config.query; +import static com.here.xyz.events.ContextAwareEvent.SpaceContext.COMPOSITE_EXTENSION; + import com.here.xyz.connectors.ErrorResponseException; +import com.here.xyz.events.ContextAwareEvent; import com.here.xyz.events.GetFeaturesByGeometryEvent; import com.here.xyz.events.SearchForFeaturesEvent; +import com.here.xyz.events.SelectiveEvent; import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.models.hub.Ref; import com.here.xyz.psql.query.SearchForFeatures; import com.here.xyz.util.db.SQLQuery; import java.sql.SQLException; @@ -39,11 +44,13 @@ public ExportSpaceByProperties(GetFeaturesByGeometryEvent event) throws SQLExcep @Override public SQLQuery buildQuery(SearchForFeaturesEvent event) throws SQLException, ErrorResponseException { + if (event.getContext() == COMPOSITE_EXTENSION && isVersionRange(event)) + return buildMainIncrementalQuery(event); return super.buildQuery(event); } @Override - protected SQLQuery buildSelectClause(SearchForFeaturesEvent event, int dataset) { + public SQLQuery buildSelectClause(SearchForFeaturesEvent event, int dataset) { return patchSelectClause(super.buildSelectClause(event, dataset), selectionOverride); } @@ -55,12 +62,17 @@ protected SQLQuery buildGeoFragment(SearchForFeaturesEvent event) { } @Override - protected SQLQuery buildFilterWhereClause(SearchForFeaturesEvent event) { + public SQLQuery buildFilterWhereClause(SearchForFeaturesEvent event) { return patchWhereClause(super.buildFilterWhereClause(event), customWhereClause); } @Override - protected SQLQuery buildLimitFragment(SearchForFeaturesEvent event) { + public SQLQuery buildFiltersFragment(SearchForFeaturesEvent event, boolean isExtension, SQLQuery filterWhereClause, int dataset) { + return super.buildFiltersFragment(event, isExtension, filterWhereClause, dataset); + } + + @Override + public SQLQuery buildLimitFragment(SearchForFeaturesEvent event) { return new SQLQuery(""); } @@ -81,4 +93,33 @@ public ExportSpace withCustomWhereClause(SQLQuery custom this.customWhereClause = customWhereClause; return this; } + + @Override + public String getSchema() { + return super.getSchema(); + } + + @Override + public String getDefaultTable(SearchForFeaturesEvent event) { + return super.getDefaultTable(event); + } + + @Override + public String buildOuterOrderByFragment(ContextAwareEvent event) { + return super.buildOuterOrderByFragment(event); + 
} + + @Override + public SQLQuery buildVersionComparison(SelectiveEvent event) { + if (event.getRef().isRange()) + return buildVersionComparisonForRange(event); + return super.buildVersionComparison(event); + } + + @Override + public SQLQuery buildNextVersionFragment(Ref ref, boolean historyEnabled, String versionParamName) { + if (ref.isRange()) + return buildNextVersionFragmentForRange(ref, historyEnabled, versionParamName); + return super.buildNextVersionFragment(ref, historyEnabled, versionParamName); + } } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/JdbcBasedHandler.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/JdbcBasedHandler.java index 4d4b831f58..8921a5cadf 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/JdbcBasedHandler.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/JdbcBasedHandler.java @@ -22,9 +22,9 @@ import com.here.xyz.httpconnector.CService; import com.here.xyz.httpconnector.util.web.LegacyHubWebClient; import com.here.xyz.util.db.ConnectorParameters; -import com.here.xyz.util.db.DatabaseSettings; import com.here.xyz.util.db.ECPSTool; import com.here.xyz.util.db.JdbcClient; +import com.here.xyz.util.db.datasource.DatabaseSettings; import com.here.xyz.util.db.datasource.PooledDataSources; import com.here.xyz.util.service.Core; import io.vertx.core.Future; diff --git a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/StatusHandler.java b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/StatusHandler.java index 1dee8f3d37..a2850ad02a 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/StatusHandler.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/httpconnector/task/StatusHandler.java @@ -27,9 +27,9 @@ import com.here.xyz.httpconnector.util.status.RDSStatus; import com.here.xyz.httpconnector.util.status.RunningQueryStatistic; import com.here.xyz.httpconnector.util.status.RunningQueryStatistics; -import com.here.xyz.util.db.DatabaseSettings; import com.here.xyz.util.db.JdbcClient; import com.here.xyz.util.db.SQLQuery; +import com.here.xyz.util.db.datasource.DatabaseSettings; import io.vertx.core.Future; import java.sql.ResultSet; import java.sql.SQLException; diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/config/jdbc/JDBCConfigClient.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/config/jdbc/JDBCConfigClient.java index 49c1986941..2f2504a556 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/config/jdbc/JDBCConfigClient.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/config/jdbc/JDBCConfigClient.java @@ -21,9 +21,9 @@ import com.here.xyz.httpconnector.CService; import com.here.xyz.hub.Service; -import com.here.xyz.util.db.DatabaseSettings; import com.here.xyz.util.db.JdbcClient; import com.here.xyz.util.db.SQLQuery; +import com.here.xyz.util.db.datasource.DatabaseSettings; import com.here.xyz.util.db.datasource.PooledDataSources; import io.vertx.core.Future; import io.vertx.core.json.Json; diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/InMemoryStorage.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/InMemoryStorage.java index 5021774a5f..35441045c5 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/InMemoryStorage.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/InMemoryStorage.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. 
+ * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,11 +39,12 @@ import com.here.xyz.events.SearchForFeaturesEvent; import com.here.xyz.models.geojson.implementation.Feature; import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.responses.ChangesetsStatisticsResponse; import com.here.xyz.responses.StatisticsResponse; -import com.here.xyz.responses.StatisticsResponse.Value; +import com.here.xyz.responses.StorageStatistics; import com.here.xyz.responses.SuccessResponse; import com.here.xyz.responses.XyzError; -import com.here.xyz.responses.XyzResponse; +import com.here.xyz.responses.changesets.ChangesetCollection; import java.util.ArrayList; import java.util.Collections; import java.util.Map; @@ -55,63 +56,63 @@ public class InMemoryStorage extends StorageConnector { private static Map storage = new ConcurrentHashMap<>(); @Override - protected XyzResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception { + protected SuccessResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception { if (event.getSpace() != null) return new SuccessResponse(); throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception { + protected SuccessResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetStatistics(GetStatisticsEvent event) throws Exception { + protected StatisticsResponse processGetStatistics(GetStatisticsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception { return new FeatureCollection() .withFeatures(event.getIds().stream().map(id -> storage.get(id)).filter(f -> f != null).collect(Collectors.toList())); } @Override - protected XyzResponse processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { return new FeatureCollection() .withFeatures(new ArrayList<>(storage.values())); } @Override - protected XyzResponse processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception { + protected FeatureCollection processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception { return 
new FeatureCollection() .withFeatures(new ArrayList<>(storage.values())); } @Override - protected XyzResponse processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception { + protected FeatureCollection processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception { + protected FeatureCollection processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception { return new FeatureCollection() .withFeatures(Collections.emptyList()); } @Override - protected XyzResponse processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception { + protected FeatureCollection processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception { event.getInsertFeatures().forEach(f -> storage.put(f.getId(), f)); return new FeatureCollection() .withFeatures(event.getInsertFeatures()) @@ -119,26 +120,30 @@ protected XyzResponse processModifyFeaturesEvent(ModifyFeaturesEvent event) thro } @Override - protected XyzResponse processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception { - return new StatisticsResponse() - .withCount(new Value<>((long) storage.size()).withEstimated(false)); + protected StorageStatistics processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception { + throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception { + protected SuccessResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception { + protected ChangesetCollection processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception { + protected ChangesetsStatisticsResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } + @Override + protected void handleProcessingException(Exception exception, Event event) throws Exception { + throw exception; + } + @Override protected void initialize(Event event) throws Exception {} } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/MockDelayStorageConnector.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/MockDelayStorageConnector.java index b19066f74a..3141de90f3 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/MockDelayStorageConnector.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/MockDelayStorageConnector.java @@ -39,11 +39,12 @@ import com.here.xyz.events.SearchForFeaturesEvent; import com.here.xyz.models.geojson.implementation.Feature; import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.responses.ChangesetsStatisticsResponse; import com.here.xyz.responses.StatisticsResponse; -import 
com.here.xyz.responses.StatisticsResponse.Value; +import com.here.xyz.responses.StorageStatistics; import com.here.xyz.responses.SuccessResponse; import com.here.xyz.responses.XyzError; -import com.here.xyz.responses.XyzResponse; +import com.here.xyz.responses.changesets.ChangesetCollection; import java.util.ArrayList; import java.util.Collections; import java.util.Map; @@ -58,46 +59,46 @@ public class MockDelayStorageConnector extends StorageConnector { private static final Logger logger = LogManager.getLogger(); @Override - protected XyzResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception { + protected SuccessResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception { if (event.getSpace() != null) return new SuccessResponse(); throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception { + protected SuccessResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetStatistics(GetStatisticsEvent event) throws Exception { + protected StatisticsResponse processGetStatistics(GetStatisticsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception { return new FeatureCollection() .withFeatures(event.getIds().stream().map(id -> storage.get(id)).filter(f -> f != null).collect(Collectors.toList())); } @Override - protected XyzResponse processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { return new FeatureCollection() .withFeatures(new ArrayList<>(storage.values())); } @Override - protected XyzResponse processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception { + protected FeatureCollection processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception { logger.warn("Iterate feature request in throttling storage."); Thread.sleep(2000); return new FeatureCollection() @@ -105,18 +106,18 @@ protected XyzResponse processIterateFeaturesEvent(IterateFeaturesEvent event) th } @Override - protected XyzResponse processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception { + protected FeatureCollection processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception { throw new 
UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception { + protected FeatureCollection processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception { return new FeatureCollection() .withFeatures(Collections.emptyList()); } @Override - protected XyzResponse processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception { + protected FeatureCollection processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception { event.getInsertFeatures().forEach(f -> storage.put(f.getId(), f)); return new FeatureCollection() .withFeatures(event.getInsertFeatures()) @@ -124,26 +125,30 @@ protected XyzResponse processModifyFeaturesEvent(ModifyFeaturesEvent event) thro } @Override - protected XyzResponse processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception { - return new StatisticsResponse() - .withCount(new Value<>((long) storage.size()).withEstimated(false)); + protected StorageStatistics processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception { + throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception { + protected SuccessResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception { + protected ChangesetCollection processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } @Override - protected XyzResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception { + protected ChangesetsStatisticsResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception { throw new UnsupportedOperationException(event.getClass().getSimpleName() + " not implemented."); } + @Override + protected void handleProcessingException(Exception exception, Event event) throws Exception { + throw exception; + } + @Override protected void initialize(Event event) throws Exception {} } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/TestStorageConnector.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/TestStorageConnector.java index 8e20b6794a..efef45779e 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/TestStorageConnector.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/connectors/test/TestStorageConnector.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
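A note on the connector changes above: narrowing each process* handler from the generic XyzResponse to its concrete response type (FeatureCollection, SuccessResponse, StorageStatistics, ChangesetCollection, ChangesetsStatisticsResponse) moves type errors from runtime casts to compile time, and the new handleProcessingException(Exception, Event) hook gives each connector a single place to translate or rethrow failures. A minimal sketch of a connector under the new signatures; this is illustrative only and abridged (NoopStorageConnector is a hypothetical name, and the remaining abstract handlers would be overridden analogously):

import com.here.xyz.connectors.StorageConnector;
import com.here.xyz.events.Event;
import com.here.xyz.events.LoadFeaturesEvent;
import com.here.xyz.models.geojson.implementation.FeatureCollection;
import java.util.Collections;

public class NoopStorageConnector extends StorageConnector {

  @Override
  protected FeatureCollection processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception {
    //Concrete return type: callers no longer need to downcast an XyzResponse.
    return new FeatureCollection().withFeatures(Collections.emptyList());
  }

  @Override
  protected void handleProcessingException(Exception exception, Event event) throws Exception {
    //Rethrow to keep the default error mapping, as the test connectors in this PR do.
    throw exception;
  }

  @Override
  protected void initialize(Event event) throws Exception {}

  //...the other process* handlers are omitted here for brevity.
}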
@@ -19,6 +19,8 @@ package com.here.xyz.hub.connectors.test; +import static com.here.xyz.responses.XyzError.ILLEGAL_ARGUMENT; + import com.here.xyz.connectors.ErrorResponseException; import com.here.xyz.connectors.StorageConnector; import com.here.xyz.events.DeleteChangesetsEvent; @@ -43,9 +45,12 @@ import com.here.xyz.models.geojson.implementation.Point; import com.here.xyz.models.geojson.implementation.Properties; import com.here.xyz.models.geojson.implementation.XyzNamespace; +import com.here.xyz.responses.ChangesetsStatisticsResponse; +import com.here.xyz.responses.StatisticsResponse; +import com.here.xyz.responses.StorageStatistics; import com.here.xyz.responses.SuccessResponse; import com.here.xyz.responses.XyzError; -import com.here.xyz.responses.XyzResponse; +import com.here.xyz.responses.changesets.ChangesetCollection; import com.here.xyz.util.service.Core; import java.util.Arrays; import org.apache.commons.lang3.RandomStringUtils; @@ -54,9 +59,9 @@ * A connector for testing handling of error responses in the service. */ public class TestStorageConnector extends StorageConnector { - //NOTE: this is is a special space ID. For it the connector will return a feature with a random id for each tile request. public static final String RANDOM_FEATURE_SPACE = "random_feature_test"; + public static final String ILLEGAL_ARGUMENT_SPACE = "illegal_argument_test"; public static final String HUGE_RESPONSE_SPACE = "huge_response_test_"; private static Feature sampleKBFeature = new Feature() .withId(RandomStringUtils.randomAlphanumeric(16)) @@ -66,47 +71,45 @@ public class TestStorageConnector extends StorageConnector { .withUpdatedAt(Core.currentTimeMillis()))); @Override - protected XyzResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception { - if (RANDOM_FEATURE_SPACE.equals(event.getSpace())) { - return new SuccessResponse(); - } - if (event.getSpace().contains(HUGE_RESPONSE_SPACE)) { + protected SuccessResponse processModifySpaceEvent(ModifySpaceEvent event) throws Exception { + if (RANDOM_FEATURE_SPACE.equals(event.getSpace()) || ILLEGAL_ARGUMENT_SPACE.equals(event.getSpace()) + || event.getSpace().contains(HUGE_RESPONSE_SPACE)) return new SuccessResponse(); - } - if (XyzError.forValue(event.getSpace()) != null) { + + if (XyzError.forValue(event.getSpace()) != null) return new SuccessResponse(); - } + throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception { - // Needs further implementation + protected SuccessResponse processModifySubscriptionEvent(ModifySubscriptionEvent event) throws Exception { + //Needs further implementation throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processGetStatistics(GetStatisticsEvent event) throws Exception { + protected StatisticsResponse processGetStatistics(GetStatisticsEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByIdEvent(GetFeaturesByIdEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " 
message."); } @Override - protected XyzResponse processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByGeometryEvent(GetFeaturesByGeometryEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByBBoxEvent(GetFeaturesByBBoxEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { + protected FeatureCollection processGetFeaturesByTileEvent(GetFeaturesByTileEvent event) throws Exception { final String space = event.getSpace(); if (space.equals(RANDOM_FEATURE_SPACE)) { FeatureCollection fc = new FeatureCollection() @@ -115,7 +118,8 @@ protected XyzResponse processGetFeaturesByTileEvent(GetFeaturesByTileEvent event .withGeometry(new Point().withCoordinates(new PointCoordinates(0, 0))) .withProperties(new Properties()))); return fc; - } else if (space.contains(HUGE_RESPONSE_SPACE)) { + } + else if (space.contains(HUGE_RESPONSE_SPACE)) { int size = Integer.parseInt(space.substring(space.lastIndexOf("_")+1)) * 1024 * 1024; int numFeatures = size / sampleKBFeature.toByteArray().length; @@ -131,45 +135,53 @@ protected XyzResponse processGetFeaturesByTileEvent(GetFeaturesByTileEvent event } @Override - protected XyzResponse processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception { + protected FeatureCollection processIterateFeaturesEvent(IterateFeaturesEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception { + protected FeatureCollection processSearchForFeaturesEvent(SearchForFeaturesEvent event) throws Exception { + if (ILLEGAL_ARGUMENT_SPACE.equals(event.getSpace())) + throw new ErrorResponseException(ILLEGAL_ARGUMENT, "Invalid request parameters."); + throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception { + protected FeatureCollection processLoadFeaturesEvent(LoadFeaturesEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception { + protected FeatureCollection processModifyFeaturesEvent(ModifyFeaturesEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception { + protected StorageStatistics processGetStorageStatisticsEvent(GetStorageStatisticsEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse 
processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception { + protected SuccessResponse processDeleteChangesetsEvent(DeleteChangesetsEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception { + protected ChangesetCollection processIterateChangesetsEvent(IterateChangesetsEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } @Override - protected XyzResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception { + protected ChangesetsStatisticsResponse processGetChangesetsStatisticsEvent(GetChangesetStatisticsEvent event) throws Exception { throw new ErrorResponseException(event.getStreamId(), XyzError.forValue(event.getSpace()), event.getSpace() + " message."); } + @Override + protected void handleProcessingException(Exception exception, Event event) throws Exception { + throw exception; + } + @Override protected void initialize(Event event) throws Exception { diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ApiParam.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ApiParam.java index ea36343e41..40d7326d2e 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ApiParam.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ApiParam.java @@ -53,53 +53,6 @@ public class ApiParam { "f.tags", "properties.@ns:com:here:xyz.tags" ); - private static Object getConvertedValue(String rawValue) { - // Boolean - if (rawValue.equals("true")) { - return true; - } - if (rawValue.equals("false")) { - return false; - } - // Long - try { - return Long.parseLong(rawValue); - } catch (NumberFormatException ignored) { - } - // Double - try { - return Double.parseDouble(rawValue); - } catch (NumberFormatException ignored) { - } - - if (rawValue.length() > 2 && rawValue.charAt(0) == '"' && rawValue.charAt(rawValue.length() - 1) == '"') { - return rawValue.substring(1, rawValue.length() - 1); - } - - if (rawValue.length() > 2 && rawValue.charAt(0) == '\'' && rawValue.charAt(rawValue.length() - 1) == '\'') { - return rawValue.substring(1, rawValue.length() - 1); - } - - if(rawValue.equalsIgnoreCase(".null")) - return null; - - // String - return rawValue; - } - - public static String getConvertedKey(String rawKey) { - if (rawKey.startsWith("p.")) - return rawKey.replaceFirst("p.", "properties."); - - String replacement = SEARCH_KEY_REPLACEMENTS.get(rawKey); - - //Allow root property search by using f. 
- if (replacement == null && rawKey.startsWith(F_PREFIX)) - return rawKey.substring(2); - - return replacement; - } - public static class Header { } @@ -339,7 +292,7 @@ public static Integer[] getPart(RoutingContext context) { static PropertiesQuery getSpacePropertiesQuery(RoutingContext context, String param) { PropertiesQuery propertyQuery = context.get("propertyQuery"); if (propertyQuery == null) { - propertyQuery = parsePropertiesQuery(context.request().query(), param, true); + propertyQuery = PropertiesQuery.fromString(context.request().query(), param, true); context.put("propertyQuery", propertyQuery); } return propertyQuery; @@ -351,7 +304,7 @@ static PropertiesQuery getSpacePropertiesQuery(RoutingContext context, String pa static PropertiesQuery getPropertiesQuery(RoutingContext context) { PropertiesQuery propertyQuery = context.get("propertyQuery"); if (propertyQuery == null) { - propertyQuery = parsePropertiesQuery(context.request().query(), "", false); + propertyQuery = PropertiesQuery.fromString(context.request().query(), "", false); context.put("propertyQuery", propertyQuery); } return propertyQuery; @@ -402,84 +355,6 @@ static PropertyQuery getPropertyQuery(String query, String key, boolean multiVal return null; } - public static PropertiesQuery parsePropertiesQuery(String query, String property, boolean spaceProperties) { - if (query == null || query.length() == 0) - return null; - - PropertyQueryList pql = new PropertyQueryList(); - Stream.of(query.split("&")) - .map(queryParam -> queryParam.startsWith("tags=") ? transformLegacyTags(queryParam) : queryParam) - .filter(queryParam -> queryParam.startsWith("p.") || queryParam.startsWith(F_PREFIX) || spaceProperties) - .forEach(keyValuePair -> { - PropertyQuery propertyQuery = new PropertyQuery(); - - String operatorComma = "-#:comma:#-"; - try { - keyValuePair = keyValuePair.replaceAll(",", operatorComma); - keyValuePair = URLDecoder.decode(keyValuePair, "utf-8"); - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - - int position=0; - String op=null; - - //store "main" operator. Needed for such cases foo=bar-->test - for (String shortOperator : QueryOperation.inputRepresentations()) { - int currentPositionOfOp = keyValuePair.indexOf(shortOperator); - if (currentPositionOfOp != -1) { - if( - // feature properties query - (!spaceProperties && (op == null || currentPositionOfOp < position || ( currentPositionOfOp == position && op.length() < shortOperator.length() ))) || - // space properties query - (keyValuePair.substring(0,currentPositionOfOp).equals(property) && spaceProperties && (op == null || currentPositionOfOp < position || ( currentPositionOfOp == position && op.length() < shortOperator.length() ))) - ) { - op = shortOperator; - position = currentPositionOfOp; - } - } - } - - if (op != null) { - String[] keyVal = new String[] { - keyValuePair.substring(0, position).replaceAll(operatorComma,","), - keyValuePair.substring(position + op.length()) - }; - //Cut from API-Gateway appended "=" - if ((">".equals(op) || "<".equals(op)) && keyVal[1].endsWith("=")) - keyVal[1] = keyVal[1].substring(0, keyVal[1].length() - 1); - - propertyQuery.setKey(spaceProperties ? 
keyVal[0] : getConvertedKey(keyVal[0])); - propertyQuery.setOperation(QueryOperation.fromInputRepresentation(op)); - String[] rawValues = keyVal[1].split(operatorComma); - - ArrayList values = new ArrayList<>(); - for (String rawValue : rawValues) - values.add(getConvertedValue(rawValue)); - - propertyQuery.setValues(values); - pql.add(propertyQuery); - } - }); - - PropertiesQuery pq = new PropertiesQuery(); - pq.add(pql); - - if (pq.stream().flatMap(List::stream).mapToLong(l -> l.getValues().size()).sum() == 0) - return null; - - return pq; - } - - private static String transformLegacyTags(String legacyTagsQuery) { - String[] tagQueryParts = legacyTagsQuery.split("="); - if (tagQueryParts.length != 2) - return legacyTagsQuery; - String tags = tagQueryParts[1]; - - return F_PREFIX + "tags" + "=cs=" + tags; - } - static Map getAdditionalParams(RoutingContext context, String type) throws Exception{ Map clusteringParams = context.get(type); @@ -514,13 +389,13 @@ static Map parseAdditionalParams(String query, String type) thro return; } String key = keyVal[0].substring(paramPrefix.length()); - Object value = getConvertedValue(keyVal[1]); + Object value = PropertiesQuery.getConvertedValue(keyVal[1]); try { validateAdditionalParams(type,key,value); }catch (Exception e){ throw new RuntimeException(e.getMessage()); } - cp.put(keyVal[0].substring(paramPrefix.length()), getConvertedValue(keyVal[1])); + cp.put(keyVal[0].substring(paramPrefix.length()), PropertiesQuery.getConvertedValue(keyVal[1])); } }); diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ChangesetApi.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ChangesetApi.java index 7b64442373..4116301ab6 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ChangesetApi.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/ChangesetApi.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
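A note on the ApiParam changes above: the property-query parsing (parsePropertiesQuery, getConvertedValue, getConvertedKey, transformLegacyTags) moved out of the REST layer into PropertiesQuery, so the handlers here and the tests further below share one entry point, now called as PropertiesQuery.fromString. A short usage sketch with hypothetical inputs; the conversion rules are taken from the removed getConvertedValue above, and the package of PropertiesQuery is assumed from the PropertyQuery import used elsewhere in this PR:

import com.here.xyz.events.PropertiesQuery; //package assumed

//Feature-property filters: only p.*, f.* and legacy tags= params are considered.
PropertiesQuery pq = PropertiesQuery.fromString("p.capacity>=50000&f.id=Q1370732", "", false);

//Space-property variant: only the named property is parsed.
PropertiesQuery spacePq = PropertiesQuery.fromString("contentUpdatedAt>=3", "contentUpdatedAt", true);

//Literal conversion also lives in PropertiesQuery now:
Object b = PropertiesQuery.getConvertedValue("true");   //Boolean true
Object l = PropertiesQuery.getConvertedValue("42");     //Long 42
Object d = PropertiesQuery.getConvertedValue("4.2");    //Double 4.2
Object s = PropertiesQuery.getConvertedValue("\"42\""); //String "42" (quoted literals stay strings)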
@@ -20,9 +20,11 @@ package com.here.xyz.hub.rest; import static com.here.xyz.events.PropertyQuery.QueryOperation.LESS_THAN; +import static com.here.xyz.hub.rest.ApiParam.Path.VERSION; +import static com.here.xyz.hub.rest.ApiParam.Query.END_VERSION; +import static com.here.xyz.hub.rest.ApiParam.Query.START_VERSION; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; -import com.google.common.primitives.Longs; import com.here.xyz.events.DeleteChangesetsEvent; import com.here.xyz.events.GetChangesetStatisticsEvent; import com.here.xyz.events.IterateChangesetsEvent; @@ -30,7 +32,6 @@ import com.here.xyz.hub.Service; import com.here.xyz.hub.auth.Authorization; import com.here.xyz.hub.connectors.models.Space; -import com.here.xyz.hub.rest.ApiParam.Path; import com.here.xyz.hub.rest.ApiParam.Query; import com.here.xyz.hub.task.SpaceConnectorBasedHandler; import com.here.xyz.psql.query.IterateChangesets; @@ -48,118 +49,63 @@ public class ChangesetApi extends SpaceBasedApi { public ChangesetApi(RouterBuilder rb) { - rb.getRoute("getChangesets").setDoValidation(false).addHandler(this::getChangesets); - rb.getRoute("getChangeset").setDoValidation(false).addHandler(this::getChangeset); - rb.getRoute("deleteChangesets").setDoValidation(false).addHandler(this::deleteChangesets); - rb.getRoute("getChangesetStatistics").setDoValidation(false).addHandler(this::getChangesetStatistics); + rb.getRoute("getChangesets").setDoValidation(false).addHandler(handleErrors(this::getChangesets)); + rb.getRoute("getChangeset").setDoValidation(false).addHandler(handleErrors(this::getChangeset)); + rb.getRoute("deleteChangesets").setDoValidation(false).addHandler(handleErrors(this::deleteChangesets)); + rb.getRoute("getChangesetStatistics").setDoValidation(false).addHandler(handleErrors(this::getChangesetStatistics)); } /** * Get changesets by version */ - private void getChangeset(final RoutingContext context) { - try { - IterateChangesetsEvent event = buildIterateChangesetsEvent(context, false); - //TODO: Add static caching to this endpoint, once the execution pipelines have been refactored. - SpaceConnectorBasedHandler.execute(getMarker(context), - space -> Authorization.authorizeManageSpacesRights(context, space.getId(), space.getOwner()).map(space), event) - .onSuccess(result -> sendResponse(context,result)) - .onFailure(t -> this.sendErrorResponse(context, t)); + private void getChangesets(final RoutingContext context) { + long startVersion = getLongQueryParam(context, START_VERSION, 0); + long endVersion = getLongQueryParam(context, END_VERSION, -1); - } catch(HttpException e) { - sendErrorResponse(context, e); - } + if (endVersion != -1 && startVersion > endVersion) + throw new IllegalArgumentException("The parameter \"" + START_VERSION + "\" needs to be smaller than or equal to \"" + END_VERSION + "\"."); + + IterateChangesetsEvent event = buildIterateChangesetsEvent(context, startVersion, endVersion); + //TODO: Add static caching to this endpoint, once the execution pipelines have been refactored. 
+ SpaceConnectorBasedHandler.execute(getMarker(context), + space -> Authorization.authorizeManageSpacesRights(context, space.getId(), space.getOwner()).map(space), event) + .onSuccess(result -> sendResponse(context, result)) + .onFailure(t -> sendErrorResponse(context, t)); } /** * Get changesets by version */ - private void getChangesets(final RoutingContext context) { - try { - IterateChangesetsEvent event = buildIterateChangesetsEvent(context, true); - //TODO: Add static caching to this endpoint, once the execution pipelines have been refactored. - SpaceConnectorBasedHandler.execute(getMarker(context), - space -> Authorization.authorizeManageSpacesRights(context, space.getId(), space.getOwner()).map(space), event) - .onSuccess(result -> sendResponse(context, result)) - .onFailure(t -> this.sendErrorResponse(context, t)); - - } catch(HttpException e) { - sendErrorResponse(context, e); - } - } - - private void sendResponse(final RoutingContext context, Object result){ - if(result instanceof Changeset && ((Changeset) result).getVersion() == -1){ - this.sendErrorResponse(context, new HttpException(NOT_FOUND, "The requested resource does not exist.")); - }else if(result instanceof ChangesetCollection && ((ChangesetCollection) result).getStartVersion() == -1 && - ((ChangesetCollection) result).getEndVersion() == -1){ - this.sendErrorResponse(context, new HttpException(NOT_FOUND, "The requested resource does not exist.")); - }else - this.sendResponseWithXyzSerialization(context, HttpResponseStatus.OK, result); - } - - private IterateChangesetsEvent buildIterateChangesetsEvent(final RoutingContext context, final boolean useChangesetCollection) throws HttpException { - final String pageToken = Query.getString(context, Query.PAGE_TOKEN, null); - final long limit = Query.getLong(context, Query.LIMIT, IterateChangesets.DEFAULT_LIMIT); - - final Long startVersion, endVersion; - - if (useChangesetCollection) { - startVersion = Query.getLong(context, Query.START_VERSION, 0L); - endVersion = Query.getLong(context, Query.END_VERSION, null); - - validateVersion(startVersion, true); - validateVersion(endVersion, false); - validateVersions(startVersion, endVersion); - } else { - final Long version = getVersionFromPath(context); - validateVersion(version, true); - startVersion = version; - endVersion = version; - } - - return new IterateChangesetsEvent() - .withSpace(getSpaceId(context)) - .withUseCollection(useChangesetCollection) - .withStartVersion(startVersion) - .withEndVersion(endVersion) - .withPageToken(pageToken) - .withLimit(limit); - } - - private Long getVersionFromPath(RoutingContext context) throws HttpException { - final Long version = Longs.tryParse(context.pathParam(Path.VERSION)); - if (version == null) - throw new HttpException(HttpResponseStatus.BAD_REQUEST, "Invalid version specified."); - - return version; - } - - private void validateVersion(Long version, boolean required) throws HttpException { - if (required && version == null) - throw new HttpException(HttpResponseStatus.BAD_REQUEST, "The parameter version is required."); - if (version != null && version < 0) - throw new HttpException(HttpResponseStatus.BAD_REQUEST, "Invalid version specified."); - } - - private void validateVersions(Long startVersion, Long endVersion) throws HttpException { - if (endVersion != null && startVersion > endVersion) - throw new HttpException(HttpResponseStatus.BAD_REQUEST, "The parameter startVersion needs to be smaller as endVersion."); + private void getChangeset(RoutingContext context) { + long 
version = getVersionFromPathParam(context); + IterateChangesetsEvent event = buildIterateChangesetsEvent(context, version, version); + //TODO: Add static caching to this endpoint, once the execution pipelines have been refactored. + SpaceConnectorBasedHandler.execute(getMarker(context), + space -> Authorization.authorizeManageSpacesRights(context, space.getId(), space.getOwner()).map(space), event) + .onSuccess(result -> { + ChangesetCollection changesets = (ChangesetCollection) result; + if (changesets.getVersions().isEmpty()) + sendErrorResponse(context, new HttpException(NOT_FOUND, "No changeset was found for version " + version)); + else + sendResponse(context, changesets.getVersions().get(version).withNextPageToken(changesets.getNextPageToken())); + }) + .onFailure(t -> sendErrorResponse(context, t)); } /** * Delete changesets by version number */ - private void deleteChangesets(final RoutingContext context) { + private void deleteChangesets(final RoutingContext context) throws HttpException { final String spaceId = getSpaceId(context); final PropertyQuery version = Query.getPropertyQuery(context.request().query(), "version", false); if (version == null || version.getValues().isEmpty()) { - this.sendErrorResponse(context, new HttpException(HttpResponseStatus.BAD_REQUEST, "Query parameter version is required")); + sendErrorResponse(context, new HttpException(HttpResponseStatus.BAD_REQUEST, "Query parameter version is required")); return; } else if (version.getOperation() != LESS_THAN) { - this.sendErrorResponse(context, new HttpException(HttpResponseStatus.BAD_REQUEST, "Only lower-than is allowed as operation for query parameter version")); + sendErrorResponse(context, + new HttpException(HttpResponseStatus.BAD_REQUEST, "Only lower-than is allowed as operation for query parameter version")); return; } @@ -174,29 +120,81 @@ else if (version.getOperation() != LESS_THAN) { .withSpace(spaceId) .withRequestedMinVersion(minVersion)) .onSuccess(result -> { - this.sendResponse(context, HttpResponseStatus.NO_CONTENT, null); + sendResponse(context, HttpResponseStatus.NO_CONTENT, null); Marker marker = getMarker(context); Service.spaceConfigClient.get(marker, spaceId) .compose(space -> Service.spaceConfigClient.store(marker, space.withMinVersion(minVersion))) .onSuccess(v -> logger.info(marker, "Updated minVersion for space {}", spaceId)) .onFailure(t -> logger.error(marker, "Error while updating minVersion for space {}", spaceId, t)); }) - .onFailure(t -> this.sendErrorResponse(context, t)); + .onFailure(t -> sendErrorResponse(context, t)); } catch (NumberFormatException e) { - this.sendErrorResponse(context, new HttpException(HttpResponseStatus.BAD_REQUEST, "Query parameter version must be a valid number larger than 0")); + throw new HttpException(HttpResponseStatus.BAD_REQUEST, "Query parameter version must be a valid number larger than 0"); } } private void getChangesetStatistics(final RoutingContext context) { - final Function> changesetAuthorization = space -> Authorization.authorizeManageSpacesRights(context, space.getId(), space.getOwner()).map(space); + final Function> changesetAuthorization = space -> Authorization.authorizeManageSpacesRights(context, space.getId(), + space.getOwner()).map(space); getChangesetStatistics(getMarker(context), changesetAuthorization, getSpaceId(context)) .onSuccess(result -> sendResponse(context, HttpResponseStatus.OK, result)) .onFailure(t -> sendErrorResponse(context, t)); } - public static Future getChangesetStatistics(Marker marker, Function> 
authorizationFunction, String spaceId) { + private IterateChangesetsEvent buildIterateChangesetsEvent(final RoutingContext context, long startVersion, long endVersion) { + String pageToken = Query.getString(context, Query.PAGE_TOKEN, null); + long limit = Query.getLong(context, Query.LIMIT, IterateChangesets.DEFAULT_LIMIT); + + return new IterateChangesetsEvent() + .withSpace(getSpaceId(context)) + .withStartVersion(startVersion) + .withEndVersion(endVersion) + .withPageToken(pageToken) + .withLimit(limit); + } + + private long getLongQueryParam(RoutingContext context, String paramName, long defaultValue) { + try { + long paramValue = Query.getLong(context, paramName); + if (paramValue < 0) + throw new IllegalArgumentException("The parameter \"" + paramName + "\" must be >= 0."); + return paramValue; + } + catch (NullPointerException e) { + return defaultValue; + } + catch (NumberFormatException e) { + throw new IllegalArgumentException("The parameter \"" + paramName + "\" is not a number.", e); + } + } + + private void sendResponse(final RoutingContext context, Object result) { + if (result instanceof Changeset && ((Changeset) result).getVersion() == -1) + sendErrorResponse(context, new HttpException(NOT_FOUND, "The requested resource does not exist.")); + else if (result instanceof ChangesetCollection && ((ChangesetCollection) result).getStartVersion() == -1 && + ((ChangesetCollection) result).getEndVersion() == -1) + sendErrorResponse(context, new HttpException(NOT_FOUND, "The requested resource does not exist.")); + else + sendResponseWithXyzSerialization(context, HttpResponseStatus.OK, result); + } + + private long getVersionFromPathParam(RoutingContext context) { + String versionParamValue = context.pathParam(VERSION); + if (versionParamValue == null) + throw new IllegalArgumentException("The parameter \"" + VERSION + "\" is required."); + + try { + return Long.parseLong(versionParamValue); + } + catch (NumberFormatException e) { + throw new IllegalArgumentException("The parameter \"" + VERSION + "\" is not a number.", e); + } + } + + public static Future getChangesetStatistics(Marker marker, + Function> authorizationFunction, String spaceId) { return SpaceConnectorBasedHandler.execute(marker, authorizationFunction, new GetChangesetStatisticsEvent().withSpace(spaceId)); } } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/FeatureQueryApi.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/FeatureQueryApi.java index 68b211abd0..5037514148 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/FeatureQueryApi.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/rest/FeatureQueryApi.java @@ -49,14 +49,14 @@ import com.here.xyz.hub.task.FeatureTask.IterateQuery; import com.here.xyz.hub.task.FeatureTask.SearchQuery; import com.here.xyz.hub.task.FeatureTask.TileQuery; -import com.here.xyz.hub.util.geo.GeoTools; +import com.here.xyz.util.geo.GeoTools; import com.here.xyz.models.geojson.HQuad; import com.here.xyz.models.geojson.WebMercatorTile; import com.here.xyz.models.geojson.coordinates.BBox; import com.here.xyz.models.geojson.exceptions.InvalidGeometryException; import com.here.xyz.models.geojson.implementation.Geometry; -import com.here.xyz.models.geojson.implementation.Point; import com.here.xyz.models.hub.Ref; +import com.here.xyz.util.geo.GeometryValidator; import com.here.xyz.util.service.HttpException; import io.vertx.core.http.HttpMethod; import io.vertx.ext.web.ParsedHeaderValue; @@ -226,17 +226,14 @@ public void getFeaturesBySpatial(final 
RoutingContext context) { .withContext(spaceContext) .withRef(getRef(context)); - - if( event.getGeometry() != null && !( (event.getGeometry() instanceof Point) && event.getRadius() == 0 ) ) - { boolean bCrossDateLine = false; - try - { bCrossDateLine = GeoTools.geometryCrossesDateline(event.getGeometry(), event.getRadius()); } - catch (Exception e) - { throw new HttpException(BAD_REQUEST,e.getMessage()); } - - if( bCrossDateLine ) - throw new HttpException(BAD_REQUEST, "Invalid arguments! geometry filter intersects with antimeridian"); - } + try { + //If an h3 reference was provided, we do not need to validate the geometry. + //If there is a referenced feature, the geometry validation happens in FeatureTask after the geometry is resolved. + if (h3Index == null && refFeatureId == null && refSpaceId == null) + GeometryValidator.validateGeometry(event.getGeometry(), event.getRadius()); + } catch (GeometryValidator.GeometryException e) { + throw new HttpException(BAD_REQUEST, e.getMessage()); + } final GeometryQuery task = new GeometryQuery(event, context, ApiResponseType.FEATURE_COLLECTION, skipCache, refSpaceId, refFeatureId); task.execute(this::sendResponse, this::sendErrorResponse); diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTask.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTask.java index 51099d863a..6f8894c07e 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTask.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTask.java @@ -60,7 +60,9 @@ import com.here.xyz.hub.util.diff.Patcher.ConflictResolution; import com.here.xyz.models.geojson.implementation.Feature; import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.models.geojson.implementation.Geometry; import com.here.xyz.responses.XyzResponse; +import com.here.xyz.util.geo.GeometryValidator; import com.here.xyz.util.service.HttpException; import io.vertx.core.AsyncResult; import io.vertx.ext.web.RoutingContext; @@ -367,9 +369,11 @@ void processGetRefFeatureResponse(Callback callback, AsyncResult< final FeatureCollection collection = (FeatureCollection) response; final List<Feature> features = collection.getFeatures(); - if (features.size() == 1) + if (features.size() == 1) { + Geometry geometry = features.get(0).getGeometry(); + GeometryValidator.validateGeometry(geometry, getEvent().getRadius()); getEvent().setGeometry(features.get(0).getGeometry()); - + } callback.call(this); } catch (Exception e) { diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTaskHandler.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTaskHandler.java index 0363900bcc..97a932e80c 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTaskHandler.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/task/FeatureTaskHandler.java @@ -1559,17 +1559,6 @@ public static > void validate(X task, Callback ca } } - if (task.getEvent() instanceof GetFeaturesByGeometryEvent ev && ev.getGeometry() != null ) { - // DS-641 - /spatial - restrict post/ref geom to max. 12000 coords - final int MAX_NR_COORDINATES = 12000; - int nrCoordinates = ev.getGeometry().getJTSGeometry().getNumPoints(); - if( MAX_NR_COORDINATES < nrCoordinates ) - { - callback.exception( new HttpException(BAD_REQUEST, String.format("Invalid arguments!
Geometry exceeds %d coordinates < %d coordinates", MAX_NR_COORDINATES, nrCoordinates) ) ); - return; - } - } - callback.call(task); } diff --git a/xyz-hub-service/src/main/java/com/here/xyz/hub/util/geo/MvtTileBuilder.java b/xyz-hub-service/src/main/java/com/here/xyz/hub/util/geo/MvtTileBuilder.java index 96825d9c24..8e5745cda8 100644 --- a/xyz-hub-service/src/main/java/com/here/xyz/hub/util/geo/MvtTileBuilder.java +++ b/xyz-hub-service/src/main/java/com/here/xyz/hub/util/geo/MvtTileBuilder.java @@ -20,12 +20,13 @@ package com.here.xyz.hub.util.geo; -import static com.here.xyz.hub.util.geo.GeoTools.WEB_MERCATOR_EPSG; -import static com.here.xyz.hub.util.geo.GeoTools.WGS84_EPSG; +import static com.here.xyz.util.geo.GeoTools.WEB_MERCATOR_EPSG; +import static com.here.xyz.util.geo.GeoTools.WGS84_EPSG; import com.here.xyz.models.geojson.WebMercatorTile; import com.here.xyz.models.geojson.implementation.Feature; import com.here.xyz.models.geojson.implementation.Geometry; +import com.here.xyz.util.geo.GeoTools; import com.wdtinc.mapbox_vector_tile.VectorTile; import com.wdtinc.mapbox_vector_tile.VectorTile.Tile; import com.wdtinc.mapbox_vector_tile.adapt.jts.IGeometryFilter; diff --git a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ApiParamTest.java b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ApiParamTest.java index fe8977da88..b1dfb1bfdf 100644 --- a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ApiParamTest.java +++ b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ApiParamTest.java @@ -34,7 +34,7 @@ public class ApiParamTest { @Test public void parsePropertiesQuery() { String URIquery = "a=1&b=2&p.a=3&p.b>4.1&p.boolean=true&f.createdAt>0&p.testString=string,\"5\""; - PropertiesQuery pq = Query.parsePropertiesQuery(URIquery, "", false); + PropertiesQuery pq = PropertiesQuery.fromString(URIquery, "", false); assertEquals("1 OR block is expected", 1, pq.size()); PropertyQueryList pql = pq.get(0); @@ -76,7 +76,7 @@ public void parsePropertiesQuery() { public void parsePropertiesQuerySpace() { // equals String URISpaceQuery = "a=1&b=2&contentUpatedAt=3"; - PropertiesQuery pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + PropertiesQuery pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); assertEquals("1 OR block is expected", 1, pq.size()); PropertyQueryList pql = pq.get(0); @@ -89,7 +89,7 @@ public void parsePropertiesQuerySpace() { // equals with OR URISpaceQuery = "a=1&b=2&contentUpatedAt=3,4"; - pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); pql = pq.get(0); query = pql.stream().filter(q -> q.getKey().equals("contentUpatedAt")).findFirst().get(); @@ -100,7 +100,7 @@ public void parsePropertiesQuerySpace() { // not equals URISpaceQuery = "a=1&b=2&contentUpatedAt!=3"; - pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); pql = pq.get(0); query = pql.stream().filter(q -> q.getKey().equals("contentUpatedAt")).findFirst().get(); @@ -110,7 +110,7 @@ public void parsePropertiesQuerySpace() { // greater URISpaceQuery = "a=1&b=2&contentUpatedAt>3"; - pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); pql = pq.get(0); query = pql.stream().filter(q -> q.getKey().equals("contentUpatedAt")).findFirst().get(); @@ -120,7 +120,7 @@ public void 
parsePropertiesQuerySpace() { // greater equals URISpaceQuery = "a=1&b=2&contentUpatedAt>=3"; - pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); pql = pq.get(0); query = pql.stream().filter(q -> q.getKey().equals("contentUpatedAt")).findFirst().get(); @@ -130,7 +130,7 @@ public void parsePropertiesQuerySpace() { // less URISpaceQuery = "a=1&b=2&contentUpatedAt<3"; - pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); pql = pq.get(0); query = pql.stream().filter(q -> q.getKey().equals("contentUpatedAt")).findFirst().get(); @@ -140,7 +140,7 @@ public void parsePropertiesQuerySpace() { // less equals URISpaceQuery = "a=1&b=2&contentUpatedAt<=3"; - pq = Query.parsePropertiesQuery(URISpaceQuery, "contentUpatedAt", true); + pq = PropertiesQuery.fromString(URISpaceQuery, "contentUpatedAt", true); pql = pq.get(0); query = pql.stream().filter(q -> q.getKey().equals("contentUpatedAt")).findFirst().get(); diff --git a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ModifyCompositeSpaceIT.java b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ModifyCompositeSpaceIT.java index b1d1d26a4a..b54beed5fb 100644 --- a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ModifyCompositeSpaceIT.java +++ b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/ModifyCompositeSpaceIT.java @@ -203,7 +203,7 @@ public void updateExtendsCyclicReference2Levels() { } @Test - public void deactivateCompositeSpacesOnParentDelete() { + public void deactivateCompositeSpacesOnParentDelete() throws InterruptedException { removeSpace("x-psql-test"); given() @@ -221,6 +221,9 @@ public void deactivateCompositeSpacesOnParentDelete() { .then() .statusCode(PRECONDITION_REQUIRED.code()); + //It takes some time until the parent deletion has been propagated. If we are too fast, the db-query will hit the deleted table. + Thread.sleep(100); + given() .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)) .when() diff --git a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/PropertiesSearchIT.java b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/PropertiesSearchIT.java index 0a101ed4fb..891f7b90d8 100644 --- a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/PropertiesSearchIT.java +++ b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/PropertiesSearchIT.java @@ -19,6 +19,8 @@ package com.here.xyz.hub.rest; +import static com.here.xyz.hub.auth.TestAuthenticator.AuthProfile.ACCESS_OWNER_1_ADMIN; +import static com.here.xyz.hub.connectors.test.TestStorageConnector.ILLEGAL_ARGUMENT_SPACE; import static com.here.xyz.util.service.BaseHttpServerVerticle.HeaderValues.APPLICATION_GEO_JSON; import static com.here.xyz.util.service.BaseHttpServerVerticle.HeaderValues.APPLICATION_JSON; import static io.netty.handler.codec.http.HttpResponseStatus.OK; @@ -27,6 +29,7 @@ import static org.hamcrest.Matchers.equalTo; import io.restassured.RestAssured; +import io.vertx.core.json.JsonObject; import java.util.concurrent.TimeUnit; import org.awaitility.Durations; import org.junit.AfterClass; @@ -52,433 +55,431 @@ public static void tearDown() { } @Test public void testContains() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.stringArray@>foo1"). - then(). - body("features.size()", equalTo(1)); - given(). - accept(APPLICATION_GEO_JSON).
- headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.stringArray=cs=foo2"). - then(). - body("features.size()", equalTo(2)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.stringArray@>foo1") + .then() + .body("features.size()", equalTo(1)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.stringArray=cs=foo2") + .then() + .body("features.size()", equalTo(2)); boolean originalUrlEncodingValue = RestAssured.urlEncodingEnabled; RestAssured.urlEncodingEnabled = false; - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.stringArray=cs=foo1,NA"). - then(). - body("features.size()", equalTo(1)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.stringArray=cs=foo1,NA") + .then() + .body("features.size()", equalTo(1)); RestAssured.urlEncodingEnabled = originalUrlEncodingValue; - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.stringArray=cs=NA"). - then(). - body("features.size()", equalTo(0)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.intArray@>1"). - then(). - body("features.size()", equalTo(1)); - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.intArray=cs=2"). - then(). - body("features.size()", equalTo(2)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - queryParam("p.objectArray@>{\"foo1\":1}"). - get(getSpacesPath() + "/x-psql-test/search"). - then(). - body("features.size()", equalTo(1)); - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - queryParam("p.objectArray=cs={\"foo2\":2}"). - get(getSpacesPath() + "/x-psql-test/search"). - then(). - body("features.size()", equalTo(2)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - queryParam("p.nestedObjectArray@>{\"foo\":{\"foo1\":{\"foo2\":2}}}"). - get(getSpacesPath() + "/x-psql-test/search"). - then(). 
- body("features.size()", equalTo(1)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.stringArray=cs=NA") + .then() + .body("features.size()", equalTo(0)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.intArray@>1") + .then() + .body("features.size()", equalTo(1)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.intArray=cs=2") + .then() + .body("features.size()", equalTo(2)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .queryParam("p.objectArray@>{\"foo1\":1}") + .get(getSpacesPath() + "/x-psql-test/search") + .then() + .body("features.size()", equalTo(1)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .queryParam("p.objectArray=cs={\"foo2\":2}") + .get(getSpacesPath() + "/x-psql-test/search") + .then() + .body("features.size()", equalTo(2)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .queryParam("p.nestedObjectArray@>{\"foo\":{\"foo1\":{\"foo2\":2}}}") + .get(getSpacesPath() + "/x-psql-test/search") + .then() + .body("features.size()", equalTo(1)); } @Test public void testGreaterThan() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity>50000"). - then(). - body("features.size()", equalTo(133)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity=gt=50000"). - then(). - body("features.size()", equalTo(133)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity>50000="). - then(). - body("features.size()", equalTo(133)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity>50000") + .then() + .body("features.size()", equalTo(133)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity=gt=50000") + .then() + .body("features.size()", equalTo(133)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity>50000=") + .then() + .body("features.size()", equalTo(133)); } @Test public void testGreaterThanEquals() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity>=50000"). - then(). - body("features.size()", equalTo(150)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity=gte=50000"). - then(). 
- body("features.size()", equalTo(150)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity>=50000") + .then() + .body("features.size()", equalTo(150)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity=gte=50000") + .then() + .body("features.size()", equalTo(150)); } @Test public void testLessThan() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity<50000"). - then(). - body("features.size()", equalTo(102)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity=lt=50000"). - then(). - body("features.size()", equalTo(102)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity<50000="). - then(). - body("features.size()", equalTo(102)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity<50000") + .then() + .body("features.size()", equalTo(102)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity=lt=50000") + .then() + .body("features.size()", equalTo(102)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity<50000=") + .then() + .body("features.size()", equalTo(102)); } @Test public void testLessThanEquals() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity<=50000"). - then(). - body("features.size()", equalTo(119)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity=lte=50000"). - then(). - body("features.size()", equalTo(119)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity<=50000") + .then() + .body("features.size()", equalTo(119)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity=lte=50000") + .then() + .body("features.size()", equalTo(119)); } @Test public void testEquals() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity=50000"). - then(). - body("features.size()", equalTo(17)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.name=Arizona Stadium"). - then(). - body("features.size()", equalTo(1)). - body("features[0].properties.name", equalTo("Arizona Stadium")); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). 
- get(getSpacesPath() + "/x-psql-test/search?p.name="). - then(). - body("features.size()", equalTo(1)). - body("features[0].properties.name", equalTo("")); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity=50000") + .then() + .body("features.size()", equalTo(17)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.name=Arizona Stadium") + .then() + .body("features.size()", equalTo(1)) + .body("features[0].properties.name", equalTo("Arizona Stadium")); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.name=") + .then() + .body("features.size()", equalTo(1)) + .body("features[0].properties.name", equalTo("")); } @Test public void testEqualsWithComma() { - given(). - urlEncodingEnabled(false). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association%20football,American%20football"). - then(). - body("features.size()", equalTo(206)); - - given(). - contentType(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - body("{\"type\": \"Feature\", \"properties\": {\"sport\": \"association, football\"}}"). - patch(getSpacesPath() + "/x-psql-test/features/Q2736585"). - then(). - statusCode(OK.code()); - - given(). - urlEncodingEnabled(false). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association,%20football"). - then(). - body("features.size()", equalTo(0)); - - given(). - urlEncodingEnabled(false). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association%2C%20football"). - then(). - body("features.size()", equalTo(1)); + given() + .urlEncodingEnabled(false) + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association%20football,American%20football") + .then() + .body("features.size()", equalTo(206)); + + given() + .contentType(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .body("{\"type\": \"Feature\", \"properties\": {\"sport\": \"association, football\"}}") + .patch(getSpacesPath() + "/x-psql-test/features/Q2736585") + .then() + .statusCode(OK.code()); + + given() + .urlEncodingEnabled(false) + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association,%20football") + .then() + .body("features.size()", equalTo(0)); + + given() + .urlEncodingEnabled(false) + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association%2C%20football") + .then() + .body("features.size()", equalTo(1)); } @Test public void testNotEquals() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.capacity!=50000"). - then(). - body("features.size()", equalTo(235)); - - given(). - accept(APPLICATION_GEO_JSON). 
- headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.name!=Arizona Stadium"). - then(). - body("features.size()", equalTo(251)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.name!="). - then(). - body("features.size()", equalTo(251)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.capacity!=50000") + .then() + .body("features.size()", equalTo(235)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.name!=Arizona Stadium") + .then() + .body("features.size()", equalTo(251)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.name!=") + .then() + .body("features.size()", equalTo(251)); } @Test public void errorTest() { - given(). - contentType(APPLICATION_JSON). - accept(APPLICATION_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - body(content("/xyz/hub/createErrorTestSpace.json")). - when().post(getCreateSpacePath("illegal_argument")).then(); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/illegal_argument/search?p.capacity=gt=50000"). - then(). - statusCode(400); - - removeSpace("illegal_argument"); + cleanUpId = ILLEGAL_ARGUMENT_SPACE; + createSpace(new JsonObject() + .put("id", ILLEGAL_ARGUMENT_SPACE) + .put("title", "My Demo Space") + .put("storage", new JsonObject().put("id", "test")) + .toString()); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/" + ILLEGAL_ARGUMENT_SPACE + "/search?p.capacity=gt=50000") + .then() + .statusCode(400); } @Test public void testEqualsWithSystemProperty() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?f.id=Q1370732"). - then(). - body("features.size()", equalTo(1)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?f.id='Q1370732'"). - then(). - body("features.size()", equalTo(1)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?f.id=33333333"). - then(). - body("features.size()", equalTo(0)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?f.createdAt<=" + System.currentTimeMillis()). - then(). 
- body("features.size()", equalTo(252)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?f.id=Q1370732") + .then() + .body("features.size()", equalTo(1)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?f.id='Q1370732'") + .then() + .body("features.size()", equalTo(1)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?f.id=33333333") + .then() + .body("features.size()", equalTo(0)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?f.createdAt<=" + System.currentTimeMillis()) + .then() + .body("features.size()", equalTo(252)); } @Test public void testSpecialCharactersProperty() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association = football"). - then(). - body("features.size()", equalTo(1)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association <= football"). - then(). - body("features.size()", equalTo(1)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association =gte= football"). - then(). - body("features.size()", equalTo(1)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=association --> football"). - then(). - body("features.size()", equalTo(1)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association = football") + .then() + .body("features.size()", equalTo(1)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association <= football") + .then() + .body("features.size()", equalTo(1)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association =gte= football") + .then() + .body("features.size()", equalTo(1)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=association --> football") + .then() + .body("features.size()", equalTo(1)); } @Test public void testSearchWithoutValues() { - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.sport=&p.sport>=&p.sport=gte=&foo=bar"). - then(). 
- body("features.size()", equalTo(0)); + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.sport=&p.sport>=&p.sport=gte=&foo=bar") + .then() + .body("features.size()", equalTo(0)); } - /* + /* * Test is commented out because it takes too long to execute, since the indexes should be created by the connector for the test be valid. * Only kept here for future reference */ //@Test - public void testCreatedAtAndUpdatedAtWith10ThousandFeaturesPlus() throws Exception { + public void testCreatedAtAndUpdatedAtWith10ThousandFeaturesPlus() throws Exception { add10ThousandFeatures(); await() .atMost(1, TimeUnit.MINUTES) .pollInterval(Durations.ONE_SECOND) .until(() -> - "PARTIAL".equals(given(). - accept(APPLICATION_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/statistics"). - then().extract().body().path("properties.searchable") - )); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?f.createdAt>0?limit=1"). - then(). - body("features.size()", equalTo(1)); - - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?f.updatedAt>0?limit=1"). - then(). - body("features.size()", equalTo(1)); + "PARTIAL".equals(given(). + accept(APPLICATION_JSON). + headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)). + when(). + get(getSpacesPath() + "/x-psql-test/statistics"). + then().extract().body().path("properties.searchable") + )); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?f.createdAt>0?limit=1") + .then() + .body("features.size()", equalTo(1)); + + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?f.updatedAt>0?limit=1") + .then() + .body("features.size()", equalTo(1)); } -/* - * Test is commented out because it takes too long to execute. s. testCreatedAtAndUpdatedAtWith10ThousandFeaturesPlus - */ + /* + * Test is commented out because it takes too long to execute. s. testCreatedAtAndUpdatedAtWith10ThousandFeaturesPlus + */ //@Test // test for DS-380 - search on none indexed propery not allowed - uncommented due to runtime. -public void test_DS380() throws Exception { + public void test_DS380() throws Exception { add10ThousandFeatures2(); await() .atMost(3, TimeUnit.MINUTES) .pollInterval(Durations.TEN_SECONDS) .until(() -> - "PARTIAL".equals(given(). - accept(APPLICATION_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/statistics").prettyPeek(). - then().extract().body().path("properties.searchable") - )); - -// test search not allowd for p.NonIndexed - given(). - accept(APPLICATION_GEO_JSON). - headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN)). - when(). - get(getSpacesPath() + "/x-psql-test/search?p.NonIndexed>800?limit=1"). - then().statusCode(400); + "PARTIAL".equals(given(). + accept(APPLICATION_JSON). + headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)). + when(). + get(getSpacesPath() + "/x-psql-test/statistics").prettyPeek(). 
+ then().extract().body().path("properties.searchable") + )); + + // test search not allowed for p.NonIndexed + given() + .accept(APPLICATION_GEO_JSON) + .headers(getAuthHeaders(ACCESS_OWNER_1_ADMIN)) + .when() + .get(getSpacesPath() + "/x-psql-test/search?p.NonIndexed>800?limit=1") + .then().statusCode(400); } } diff --git a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/TestWithSpaceCleanup.java b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/TestWithSpaceCleanup.java index b38dcc8872..8c8f895b74 100644 --- a/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/TestWithSpaceCleanup.java +++ b/xyz-hub-test/src/test/java/com/here/xyz/hub/rest/TestWithSpaceCleanup.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,9 +41,9 @@ protected static String getCreateSpacePath() { return getCreateSpacePath(spaceName); } - static String getCreateSpacePath(String spaceName) { + static String getCreateSpacePath(String spaceId) { if (System.getenv().containsKey("SPACES_PATH")) - return getSpacesPath() + (Strings.isNullOrEmpty(spaceName) ? "" : ("/" + spaceName)); + return getSpacesPath() + (Strings.isNullOrEmpty(spaceId) ? "" : ("/" + spaceId)); return getSpacesPath(); } @@ -57,8 +57,7 @@ protected static String getSpaceId() { @After public void tearDownTest() { - if (cleanUpId != null) { + if (cleanUpId != null) removeSpace(cleanUpId); - } } } diff --git a/xyz-hub-test/src/test/resources/xyz/hub/createErrorTestSpace.json b/xyz-hub-test/src/test/resources/xyz/hub/createErrorTestSpace.json deleted file mode 100644 index ddd9237586..0000000000 --- a/xyz-hub-test/src/test/resources/xyz/hub/createErrorTestSpace.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "illegal_argument", - "title": "My Demo Space", - "client": {} -} \ No newline at end of file diff --git a/xyz-jobs/pom.xml b/xyz-jobs/pom.xml index 3bb2917e11..b2814bede0 100644 --- a/xyz-jobs/pom.xml +++ b/xyz-jobs/pom.xml @@ -47,7 +47,54 @@ + + org.apache.maven.plugins + maven-shade-plugin + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + **/Log4j2Plugins.dat + + + + org.apache.logging.log4j:log4j-jcl + + ** + + + + commons-logging:commons-logging + + ** + + + + false + + + com.here.xyz.jobs.service.JobService + + true + + + + + ${project.artifactId} + + + + package + + shade + + + + - \ No newline at end of file diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/JobPlayground.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/JobPlayground.java index a6b070cb11..de68091c0c 100644 --- a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/JobPlayground.java +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/JobPlayground.java @@ -35,6 +35,7 @@ import com.here.xyz.jobs.datasets.Files; import com.here.xyz.jobs.datasets.files.FileInputSettings; import com.here.xyz.jobs.datasets.files.GeoJson; +import com.here.xyz.jobs.datasets.space.UpdateStrategy; import com.here.xyz.jobs.service.Config; import com.here.xyz.jobs.steps.StepGraph; import com.here.xyz.jobs.steps.execution.LambdaBasedStep; @@ -46,6 +47,7 @@ import com.here.xyz.jobs.steps.impl.DropIndexes; import com.here.xyz.jobs.steps.impl.MarkForMaintenance; import com.here.xyz.jobs.steps.impl.transport.CopySpace; +import com.here.xyz.jobs.steps.impl.transport.ExportSpaceToFiles; import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace;
import com.here.xyz.jobs.steps.inputs.Input; import com.here.xyz.jobs.steps.outputs.Output; @@ -58,6 +60,7 @@ import com.here.xyz.models.hub.Space; import com.here.xyz.util.ARN; import com.here.xyz.util.db.pg.XyzSpaceTableHelper.Index; +import com.here.xyz.util.runtime.LambdaFunctionRuntime; import com.here.xyz.util.service.Core; import com.here.xyz.util.service.aws.SimulatedContext; import com.here.xyz.util.web.HubWebClient; @@ -102,6 +105,7 @@ import software.amazon.awssdk.services.lambda.model.InvokeRequest; import software.amazon.awssdk.services.lambda.model.InvokeResponse; +//TODO: Delete or extend JobTestBase and cleanUp. public class JobPlayground { private static final Logger logger = LogManager.getLogger(); private static HubWebClient hubWebClient; @@ -109,12 +113,12 @@ public class JobPlayground { private static Space sampleSpace; private static Space targetSpace; private static boolean simulateExecution = true; - private static boolean executeWholeJob = true; + private static boolean executeWholeJob = false; private static ImportFilesToSpace.Format importFormat = ImportFilesToSpace.Format.GEOJSON; private static int uploadFileCount = 2; private static String jobServiceBaseUrl = "http://localhost:7070"; - private static Usecase playgroundUsecase = Usecase.IMPORT; + private static Usecase playgroundUsecase = Usecase.EXPORT; private enum Usecase { IMPORT, @@ -189,7 +193,7 @@ private static void init() throws WebClientException, JsonProcessingException { mockJob = new Job().create() .withDescription("Sample export job") .withOwner("me") - .withSource(new DatasetDescription.Space<>().withId(sampleSpace.getId())) + .withSource(new DatasetDescription.Space<>().withId("REPLACE_WITH_EXISTING")) .withTarget(new Files<>().withOutputSettings(new FileOutputSettings().withFormat(new GeoJson()))); } } @@ -212,14 +216,14 @@ public static void main(String[] args) throws IOException, InterruptedException, else init(); - startRealJob(realJobSourceSpaceId, realJobTargetSpaceId); +// startRealJob(realJobSourceSpaceId, realJobTargetSpaceId); + + init(); -// init(); -// -// if (executeWholeJob) -// startMockJob(); -// else -// startLambdaExecutions(); + if (executeWholeJob) + startMockJob(); + else + startLambdaExecutions(); } private static void startLambdaExecutions() throws IOException { @@ -238,6 +242,8 @@ private static void startLambdaExecutions() throws IOException { runMarkForMaintenanceStep(sampleSpace.getId()); }else if(playgroundUsecase.equals(Usecase.COPY)) { runCopySpaceStep(sampleSpace.getId(), targetSpace.getId()); + } else if (playgroundUsecase.equals(Usecase.EXPORT)) { + runExportSpaceToFilesStep(sampleSpace.getId()); + } } @@ -323,7 +329,7 @@ private static String generateContentLine(ImportFilesToSpace.Format format, int else if(format.equals(ImportFilesToSpace.Format.CSV_GEOJSON)) return "\"{'\"type'\":'\"Feature'\",'\"geometry'\":{'\"type'\":'\"Point'\",'\"coordinates'\":["+(rd.nextInt(179))+"."+(rd.nextInt(100))+","+(rd.nextInt(79))+"."+(rd.nextInt(100))+"]},'\"properties'\":{'\"test'\":"+i+"}}\""+lineSeparator; else - return "{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":["+(rd.nextInt(179))+"."+(rd.nextInt(100))+","+(rd.nextInt(79))+"."+(rd.nextInt(100))+"]},\"properties\":{\"test\":"+i+"}}"+lineSeparator; + return "{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":["+(rd.nextInt(179))+"."+(rd.nextInt(100))+","+(rd.nextInt(79))+"."+(rd.nextInt(100))+"]},\"properties\":{\"te\\\"st\":"+i+"}}"+lineSeparator; } private static void
startMockJob() { @@ -471,7 +477,7 @@ public static void runDropIndexStep(String spaceId) throws IOException { } public static void runImportFilesToSpaceStep(String spaceId, ImportFilesToSpace.Format format) throws IOException { - runStep(new ImportFilesToSpace().withSpaceId(spaceId).withFormat(format)); + runStep(new ImportFilesToSpace().withSpaceId(spaceId).withFormat(format).withUpdateStrategy(UpdateStrategy.DEFAULT_UPDATE_STRATEGY)); } public static void runCreateIndexStep(String spaceId, Index index) throws IOException { @@ -490,6 +496,10 @@ public static void runCopySpaceStep(String sourceSpaceId, String targetSpaceId) runStep(new CopySpace().withSpaceId(sourceSpaceId).withTargetSpaceId(targetSpaceId).withSourceVersionRef(new Ref("HEAD"))); } + public static void runExportSpaceToFilesStep(String sourceSpaceId) throws IOException { + runStep(new ExportSpaceToFiles().withSpaceId(sourceSpaceId)); + } + private static void runStep(LambdaBasedStep step) throws IOException { if (simulateExecution) { simulateLambdaStepRequest(step, START_EXECUTION); @@ -507,6 +517,7 @@ private static void runStep(LambdaBasedStep step) throws IOException { private static void simulateLambdaStepRequest(LambdaBasedStep step, RequestType requestType) throws IOException { OutputStream os = new ByteArrayOutputStream(); Context ctx = new SimulatedContext("localLambda", null); + new LambdaFunctionRuntime(ctx, step.getGlobalStepId()); final LambdaStepRequest request = prepareStepRequestPayload(step, requestType); new LambdaBasedStepExecutor().handleRequest(new ByteArrayInputStream(request.toByteArray()), os, ctx); diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/Filters.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/Filters.java index f682720669..eaff9237e6 100644 --- a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/Filters.java +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/Filters.java @@ -25,17 +25,20 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonView; +import com.here.xyz.XyzSerializable.Internal; import com.here.xyz.XyzSerializable.Public; import com.here.xyz.XyzSerializable.Static; import com.here.xyz.events.ContextAwareEvent.SpaceContext; +import com.here.xyz.events.PropertiesQuery; import com.here.xyz.util.Hasher; @JsonIgnoreProperties(ignoreUnknown = true) public class Filters { - - //@TODO: Copy Filters to old impl V1 + Rewrite V2 that propertyFilter uses modelBase approach @JsonView({Public.class}) - private String propertyFilter; + private PropertiesQuery propertyFilter; + //TODO: Remove after V1 is shut down + @JsonView({Internal.class, Static.class}) + private String propertyFilterAsString; @JsonView({Public.class}) private SpatialFilter spatialFilter; @@ -43,12 +46,17 @@ public class Filters { @JsonView({Public.class, Static.class}) private SpaceContext context = DEFAULT; - public String getPropertyFilter() { + public PropertiesQuery getPropertyFilter() { return propertyFilter; } - public void setPropertyFilter(String propertyFilter) { - this.propertyFilter = propertyFilter; + public void setPropertyFilter(Object propertyFilter) { + if (propertyFilter instanceof PropertiesQuery propFilter) + this.propertyFilter = propFilter; + else if (propertyFilter instanceof String propFilter) { + this.propertyFilter = PropertiesQuery.fromString(propFilter);
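+ //Also keep the raw string representation - it is still used for hashing and by V1 jobs (see getPropertyFilterAsString() and getHash() below)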
+ this.propertyFilterAsString = propFilter; + } } public Filters withPropertyFilter(String propertyFilter) { @@ -56,6 +64,11 @@ public Filters withPropertyFilter(String propertyFilter) { return this; } + //TODO: Remove after V1 is shut down + public String getPropertyFilterAsString() { + return propertyFilterAsString; + } + public SpatialFilter getSpatialFilter() { return spatialFilter; } @@ -83,12 +96,11 @@ public Filters withContext(SpaceContext context) { } @JsonIgnore - public String getHash() - { - String input = "#" + (propertyFilter != null ? propertyFilter : "" ) - + "#" + (spatialFilter != null ? serialize(spatialFilter) : "") - + "#"; + public String getHash() { + String input = "#" + (getPropertyFilterAsString() != null ? getPropertyFilterAsString() : "") + + "#" + (spatialFilter != null ? serialize(spatialFilter) : "") + + "#"; - return Hasher.getHash(input); + return Hasher.getHash(input); } } diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/JobCompiler.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/JobCompiler.java index 4b55be5cc0..cf5150b9e7 100644 --- a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/JobCompiler.java +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/JobCompiler.java @@ -20,6 +20,7 @@ package com.here.xyz.jobs.steps; import com.here.xyz.jobs.Job; +import com.here.xyz.jobs.steps.compiler.ExportToFiles; import com.here.xyz.jobs.steps.compiler.ImportFromFiles; import com.here.xyz.jobs.steps.compiler.JobCompilationInterceptor; import com.here.xyz.util.Async; @@ -38,6 +39,7 @@ public class JobCompiler { static { registerCompilationInterceptor(ImportFromFiles.class); + registerCompilationInterceptor(ExportToFiles.class); //registerCompilationInterceptor(CopySpaceToSpace.class); } diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/CopySpaceToSpace.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/CopySpaceToSpace.java index b78a832bcc..83068748b9 100644 --- a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/CopySpaceToSpace.java +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/CopySpaceToSpace.java @@ -49,7 +49,8 @@ public CompilationStepGraph compile(Job job) { if(filters != null) { //filters.context is not supported - copySpaceStep.setPropertyFilter(filters.getPropertyFilter()); + //TODO: Work with the PropertiesQuery object here + //copySpaceStep.setPropertyFilter(filters.getPropertyFilter()); SpatialFilter spatialFilter = filters.getSpatialFilter(); if (spatialFilter != null) { diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/ExportToFiles.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/ExportToFiles.java new file mode 100644 index 0000000000..961e333c9a --- /dev/null +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/compiler/ExportToFiles.java @@ -0,0 +1,88 @@ +/* + * Copyright (C) 2017-2024 HERE Europe B.V. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * License-Filename: LICENSE + */ + +package com.here.xyz.jobs.steps.compiler; + +import com.here.xyz.jobs.Job; +import com.here.xyz.jobs.datasets.DatasetDescription; +import com.here.xyz.jobs.datasets.DatasetDescription.Space; +import com.here.xyz.jobs.datasets.Files; + +import com.here.xyz.jobs.datasets.files.GeoJson; +import com.here.xyz.jobs.steps.CompilationStepGraph; +import com.here.xyz.jobs.steps.Config; +import com.here.xyz.jobs.steps.JobCompiler; +import com.here.xyz.jobs.steps.impl.transport.ExportSpaceToFiles; +import com.here.xyz.models.hub.Ref; +import com.here.xyz.responses.StatisticsResponse; +import com.here.xyz.util.web.HubWebClient; + +import java.util.HashSet; +import java.util.Set; + +public class ExportToFiles implements JobCompilationInterceptor { + public static Set<Class<? extends DatasetDescription>> allowedSourceTypes = new HashSet<>(Set.of(Space.class)); + + @Override + public boolean chooseMe(Job job) { + return job.getTarget() instanceof Files + && allowedSourceTypes.contains(job.getSource().getClass()) + && ((Files) job.getTarget()).getOutputSettings().getFormat() instanceof GeoJson; + } + + @Override + public CompilationStepGraph compile(Job job) { + Space source = (Space) job.getSource(); + resolveVersionRef(source); + String spaceId = source.getId(); + + ExportSpaceToFiles exportToFilesStep = new ExportSpaceToFiles() + .withSpaceId(spaceId) + .withJobId(job.getId()) + .withSpatialFilter(source.getFilters() != null ? source.getFilters().getSpatialFilter() : null) + .withPropertyFilter(source.getFilters() != null ? source.getFilters().getPropertyFilter() : null) + .withContext(source.getFilters() != null ? source.getFilters().getContext() : null) + .withVersionRef(source.getVersionRef());
+ + return compileExportSteps(exportToFilesStep); + } + + public static CompilationStepGraph compileExportSteps(ExportSpaceToFiles exportToFilesStep) { + return (CompilationStepGraph) new CompilationStepGraph() + .addExecution(exportToFilesStep); + } + + private void resolveVersionRef(Space sourceSpace) { + if (sourceSpace.getVersionRef() == null || sourceSpace.getVersionRef().isRange()) + return; + + try { + if (sourceSpace.getVersionRef().isHead()) { + StatisticsResponse statisticsResponse = HubWebClient.getInstance(Config.instance.HUB_ENDPOINT).loadSpaceStatistics(sourceSpace.getId()); + sourceSpace.setVersionRef(new Ref(statisticsResponse.getMaxVersion().getValue())); + } else if (sourceSpace.getVersionRef().isTag()) { + long version = HubWebClient.getInstance(Config.instance.HUB_ENDPOINT).loadTag(sourceSpace.getId(), sourceSpace.getVersionRef().getTag()).getVersion(); + if (version >= 0) { + sourceSpace.setVersionRef(new Ref(version)); + } + } + } catch (Exception e) { + throw new JobCompiler.CompilationError("Unable to resolve the VersionRef '" + sourceSpace.getVersionRef() + "'!"); + } + } +} diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/execution/GraphTransformer.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/execution/GraphTransformer.java index 859954eeef..fb441993b9 100644 --- a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/execution/GraphTransformer.java +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/steps/execution/GraphTransformer.java @@ -25,6 +25,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.here.xyz.XyzSerializable; +import com.here.xyz.jobs.service.Config; import com.here.xyz.jobs.steps.Step; import com.here.xyz.jobs.steps.StepExecution; import com.here.xyz.jobs.steps.StepGraph; @@ -204,10 +205,21 @@ private List compileExecutions(List executions, State private NamedState compile(Step step, State.Builder previousState) { NamedState state = new NamedState<>(step.getClass().getSimpleName() + "." + step.getId(), TaskState.builder()); - if (step instanceof LambdaBasedStep lambdaStep) + + if (step instanceof RunEmrJob emrStep) { + if (Config.instance.LOCALSTACK_ENDPOINT == null) + compile(emrStep, state); + else { + //Inject defaults for local execution + emrStep.setSparkParams("--add-exports=java.base/java.nio=ALL-UNNAMED " + + "--add-exports=java.base/sun.nio.ch=ALL-UNNAMED " + + "--add-exports=java.base/java.lang.invoke=ALL-UNNAMED " + + "--add-exports=java.base/java.util=ALL-UNNAMED " + emrStep.getSparkParams()); + compile((LambdaBasedStep) emrStep, state); + } + } + else if (step instanceof LambdaBasedStep lambdaStep) compile(lambdaStep, state); - else if (step instanceof RunEmrJob emrStep) - compile(emrStep, state); else throw new NotImplementedException("The provided step implementation (" + step.getClass().getSimpleName() + ") is not supported."); //TODO: Add other implementations here (e.g. EcsBasedStep)
diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/util/test/JobTestBase.java b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/util/test/JobTestBase.java new file mode 100644 index 0000000000..20c1f09bba --- /dev/null +++ b/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/util/test/JobTestBase.java @@ -0,0 +1,176 @@ +package com.here.xyz.jobs.util.test; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.here.xyz.XyzSerializable; +import com.here.xyz.jobs.Job; +import com.here.xyz.jobs.RuntimeInfo; +import com.here.xyz.jobs.RuntimeStatus; +import com.here.xyz.jobs.steps.outputs.Output; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URL; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import static com.google.common.net.HttpHeaders.CONTENT_TYPE; +import static com.google.common.net.MediaType.JSON_UTF_8; +import static java.net.http.HttpClient.Redirect.NORMAL; + +public class JobTestBase extends StepTestBase { + private static final Logger logger = LogManager.getLogger(); + + //Job API related + public static String createJob(Job job) throws IOException, InterruptedException { + logger.info("Creating job ..."); + HttpResponse<byte[]> jobResponse = post("/jobs", job); + + logger.info("Got response:"); + logger.info(new String(jobResponse.body())); + + Job createdJob = XyzSerializable.deserialize(jobResponse.body(), Job.class); + return createdJob.getId(); + } + + public static void uploadFileToJob(String jobId, byte[] fileContent) throws IOException, InterruptedException { + HttpResponse<byte[]> inputResponse = post("/jobs/" + jobId + "/inputs", Map.of("type", "UploadUrl")); + String uploadUrl = (String) XyzSerializable.deserialize(inputResponse.body(), Map.class).get("url"); + uploadUrl = uploadUrl.replace("localstack", "localhost"); + uploadInputFile(fileContent, new URL(uploadUrl)); + } + + public static void uploadFilesToJob(String jobId, List<byte[]> fileContents) throws IOException, InterruptedException { + //Upload each of the given file contents as a separate input + for (byte[] fileContent : fileContents) { + uploadFileToJob(jobId, fileContent); + } + } + + public static void startJob(String jobId) throws IOException, InterruptedException { + logger.info("Starting job ..."); + patch("/jobs/" + jobId + "/status", Map.of("desiredAction", "START")); + } + + public static void deleteJob(String jobId) throws IOException, InterruptedException { + logger.info("Deleting job ..."); + delete("/jobs/" + jobId); + } + + public static RuntimeStatus getJobStatus(String jobId) throws IOException, InterruptedException { + logger.info("Getting job status ..."); + HttpResponse<byte[]> statusResponse = get("/jobs/" + jobId + "/status"); + return XyzSerializable.deserialize(statusResponse.body(), RuntimeStatus.class); + } + + public static List<Output> getJobOutputs(String jobId) throws IOException, InterruptedException { + logger.info("Getting job outputs ..."); + HttpResponse<byte[]> outputResponse = get("/jobs/" + jobId + "/outputs"); + return XyzSerializable.deserialize(outputResponse.body(), new TypeReference<List<Output>>() {}); + } + + public static void pollJobStatus(String jobId) throws InterruptedException {
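+ //NOTE: Polling runs on a single-threaded scheduler and stops itself once the job reports a final state or the timeout below is reached.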
+ ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(); + + //Poll job status every 5 seconds + executor.scheduleAtFixedRate(() -> { + try { + RuntimeStatus status = getJobStatus(jobId); + logger.info("Job state for {}: {} ({}/{} steps succeeded)", jobId, status.getState(), status.getSucceededSteps(), + status.getOverallStepCount()); + if (status.getState().isFinal()) { + if (!status.getState().equals(RuntimeInfo.State.SUCCEEDED)) + logger.info("Job state for {} is not SUCCEEDED: {}", jobId, XyzSerializable.serialize(status)); + executor.shutdownNow(); + } + } + catch (Exception e) { + logger.error(e); + throw new RuntimeException(e); + } + }, 0, 5, TimeUnit.SECONDS); + + int timeoutSeconds = 120; + if (!executor.awaitTermination(timeoutSeconds, TimeUnit.SECONDS)) { + executor.shutdownNow(); + logger.info("Stopped polling status for job {} after timeout {} seconds", jobId, timeoutSeconds); + } + } + + private static HttpResponse<byte[]> get(String path) throws IOException, InterruptedException { + return request("GET", path, null); + } + + private static HttpResponse<byte[]> post(String path, Object requestPayload) throws IOException, InterruptedException { + return request("POST", path, requestPayload); + } + + private static HttpResponse<byte[]> patch(String path, Object requestPayload) throws IOException, InterruptedException { + return request("PATCH", path, requestPayload); + } + + private static HttpResponse<byte[]> delete(String path) throws IOException, InterruptedException { + return request("DELETE", path, null); + } + + private static HttpResponse<byte[]> request(String method, String path, Object requestPayload) throws IOException, InterruptedException { + HttpRequest.BodyPublisher bodyPublisher = requestPayload == null ? HttpRequest.BodyPublishers.noBody() + : HttpRequest.BodyPublishers.ofByteArray(XyzSerializable.serialize(requestPayload).getBytes()); + + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create(config.JOB_API_ENDPOINT + path)) + .header(CONTENT_TYPE, JSON_UTF_8.toString()) + .method(method, bodyPublisher) + .version(HttpClient.Version.HTTP_1_1) + .build(); + + HttpClient client = HttpClient.newBuilder().followRedirects(NORMAL).build(); + HttpResponse<byte[]> response = client.send(request, HttpResponse.BodyHandlers.ofByteArray()); + if (response.statusCode() >= 400) + throw new RuntimeException("Received error response with status code: " + response.statusCode() + " response:\n" + + new String(response.body())); + return response; + } + + private static void uploadInputFile(byte[] data, URL uploadUrl) throws IOException { + HttpURLConnection connection = (HttpURLConnection) uploadUrl.openConnection(); + connection.setDoOutput(true); + connection.setRequestProperty("Content-Type", "application/json"); + connection.setRequestMethod("PUT"); + OutputStream out = connection.getOutputStream(); + + out.write(data); + out.close(); + + if (connection.getResponseCode() < 200 || connection.getResponseCode() > 299) + throw new RuntimeException("Error uploading file, got status code " + connection.getResponseCode()); + } + + protected void createSelfRunningJob(Job job) throws Exception { + //Create the job - it is expected to start automatically + createJob(job); + //Wait until the job reaches a final state + pollJobStatus(job.getId()); + } + + protected void createAndStartJob(Job job, byte[] fileContent) throws Exception { + //Create the job + createJob(job); + //Upload content if provided + if (fileContent != null) + uploadFileToJob(job.getId(), fileContent); + //Start the job execution + startJob(job.getId()); + //Wait until the job reaches a final state
+ pollJobStatus(job.getId()); + + } +} diff --git a/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/ExportJobTestIT.java b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/ExportJobTestIT.java new file mode 100644 index 0000000000..07b77e0710 --- /dev/null +++ b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/ExportJobTestIT.java @@ -0,0 +1,36 @@ +package com.here.xyz.jobs; + +import com.here.xyz.jobs.datasets.DatasetDescription; +import com.here.xyz.jobs.datasets.FileOutputSettings; +import com.here.xyz.jobs.datasets.Files; +import com.here.xyz.jobs.datasets.files.GeoJson; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static com.here.xyz.jobs.datasets.files.FileFormat.EntityPerLine.Feature; + +public class ExportJobTestIT extends JobTest { + + @BeforeEach + public void setUp() { + super.setUp(); + putRandomFeatureCollectionToSpace(SPACE_ID, 50); + } + + @Test + public void testSimpleExport() throws Exception { + Job exportJob = buildExportJob(); + createSelfRunningJob(exportJob); + + checkSucceededJob(exportJob); + deleteJob(exportJob.getId()); + } + + private Job buildExportJob() { + return new Job() + .withId(JOB_ID) + .withDescription("Export Job Test") + .withSource(new DatasetDescription.Space<>().withId(SPACE_ID)) + .withTarget(new Files<>().withOutputSettings(new FileOutputSettings().withFormat(new GeoJson().withEntityPerLine(Feature)))); + } +} diff --git a/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/ImportJobTestIT.java b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/ImportJobTestIT.java new file mode 100644 index 0000000000..ad8f1fca86 --- /dev/null +++ b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/ImportJobTestIT.java @@ -0,0 +1,29 @@ +package com.here.xyz.jobs; + +import com.here.xyz.jobs.datasets.DatasetDescription; +import com.here.xyz.jobs.datasets.Files; +import com.here.xyz.jobs.datasets.files.FileInputSettings; +import com.here.xyz.jobs.datasets.files.GeoJson; +import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace; +import com.here.xyz.jobs.util.test.ContentCreator; +import org.junit.jupiter.api.Test; + +import static com.here.xyz.jobs.datasets.files.FileFormat.EntityPerLine.Feature; + +public class ImportJobTestIT extends JobTest { + + @Test + public void testSimpleImport() throws Exception { + Job importJob = buildImportJob(); + createAndStartJob(importJob, ContentCreator.generateImportFileContent(ImportFilesToSpace.Format.GEOJSON, 50)); + deleteJob(importJob.getId()); + } + + private Job buildImportJob() { + return new Job() + .withId(JOB_ID) + .withDescription("Import Job Test") + .withSource(new Files<>().withInputSettings(new FileInputSettings().withFormat(new GeoJson().withEntityPerLine(Feature)))) + .withTarget(new DatasetDescription.Space<>().withId(SPACE_ID)); + } +} diff --git a/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/JobTest.java b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/JobTest.java new file mode 100644 index 0000000000..9627fb7f23 --- /dev/null +++ b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/JobTest.java @@ -0,0 +1,26 @@ +package com.here.xyz.jobs; + +import com.here.xyz.jobs.util.test.JobTestBase; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; + +import java.io.IOException; + +public class JobTest extends JobTestBase { + @BeforeEach + public void setUp() { + createSpace(SPACE_ID); + } + + 
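//NOTE: Each test case works on its own freshly created space (see setUp() above); tearDown() removes it again, so test runs stay isolated. + 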
@AfterEach + public void tearDown() { + deleteSpace(SPACE_ID); + } + + protected void checkSucceededJob(Job job) throws IOException, InterruptedException { + RuntimeStatus status = getJobStatus(job.getId()); + Assertions.assertEquals(RuntimeInfo.State.SUCCEEDED, status.getState()); + Assertions.assertEquals(status.getOverallStepCount(), status.getSucceededSteps()); + } +} diff --git a/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/steps/compiler/ExportToFilesTest.java b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/steps/compiler/ExportToFilesTest.java new file mode 100644 index 0000000000..00b48bbf85 --- /dev/null +++ b/xyz-jobs/xyz-job-service/src/test/java/com/here/xyz/jobs/steps/compiler/ExportToFilesTest.java @@ -0,0 +1,66 @@ +package com.here.xyz.jobs.steps.compiler; + +import com.here.xyz.jobs.Job; +import com.here.xyz.jobs.JobTest; +import com.here.xyz.jobs.datasets.DatasetDescription; +import com.here.xyz.jobs.datasets.FileOutputSettings; +import com.here.xyz.jobs.datasets.Files; +import com.here.xyz.jobs.datasets.files.GeoJson; +import com.here.xyz.jobs.steps.CompilationStepGraph; +import com.here.xyz.jobs.steps.JobCompiler.CompilationError; +import com.here.xyz.jobs.steps.impl.transport.ExportSpaceToFiles; +import com.here.xyz.models.hub.Ref; +import com.here.xyz.models.hub.Space; +import com.here.xyz.models.hub.Tag; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static com.here.xyz.jobs.datasets.files.FileFormat.EntityPerLine.Feature; + +public class ExportToFilesTest extends JobTest { + + @BeforeEach + public void setUp() { + createSpace(new Space() + .withId(SPACE_ID) + .withVersionsToKeep(10) + , false); + + putRandomFeatureCollectionToSpace(SPACE_ID, 2); + putRandomFeatureCollectionToSpace(SPACE_ID, 2); + putRandomFeatureCollectionToSpace(SPACE_ID, 2); + } + + @Test + public void testResolveHeadVersion() { + CompilationStepGraph graph = new ExportToFiles().compile(buildExportJobWithVersionRef(new Ref("HEAD"))); + //HEAD should point to version 3 + Assertions.assertEquals(3, ((ExportSpaceToFiles) graph.getExecutions().get(0)).getVersionRef().getVersion()); + } + + @Test + public void testResolveNotExistingTag() { + //The tag "NA" does not exist - resolving it should fail + Assertions.assertThrows(CompilationError.class, () -> new ExportToFiles().compile(buildExportJobWithVersionRef(new Ref("NA")))); + } + + @Test + public void testResolveExistingTag() { + String tagName = "TAG1"; + int tagVersion = 2; + + createTag(SPACE_ID, new Tag().withId(tagName).withVersion(tagVersion)); + + CompilationStepGraph graph = new ExportToFiles().compile(buildExportJobWithVersionRef(new Ref(tagName))); + Assertions.assertEquals(tagVersion, ((ExportSpaceToFiles) graph.getExecutions().get(0)).getVersionRef().getVersion()); + } + + private Job buildExportJobWithVersionRef(Ref versionRef) { + return new Job() + .withId(JOB_ID) + .withDescription("Export Job Test") + .withSource(new DatasetDescription.Space<>().withId(SPACE_ID).withVersionRef(versionRef)) + .withTarget(new Files<>().withOutputSettings(new FileOutputSettings().withFormat(new GeoJson().withEntityPerLine(Feature)))); + } +} diff --git a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/SpatialFilter.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/datasets/filters/SpatialFilter.java similarity index 84% rename from xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/SpatialFilter.java rename to
xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/datasets/filters/SpatialFilter.java index fb57c750fe..3b3f5f9062 100644 --- a/xyz-jobs/xyz-job-service/src/main/java/com/here/xyz/jobs/datasets/filters/SpatialFilter.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/datasets/filters/SpatialFilter.java @@ -23,11 +23,13 @@ import com.fasterxml.jackson.annotation.JsonView; import com.here.xyz.XyzSerializable.Public; import com.here.xyz.models.geojson.exceptions.InvalidGeometryException; +import com.here.xyz.util.geo.GeometryValidator.GeometryException; import com.here.xyz.models.geojson.implementation.Geometry; +import com.here.xyz.util.geo.GeometryValidator; +import com.here.xyz.util.service.BaseHttpServerVerticle; @JsonInclude(JsonInclude.Include.NON_DEFAULT) public class SpatialFilter { - @JsonView({Public.class}) private Geometry geometry; @@ -101,4 +103,13 @@ public SpatialFilter withClip(final boolean clipped) { setClipped(clipped); return this; } + + public void validateSpatialFilter() throws BaseHttpServerVerticle.ValidationException { + try { + GeometryValidator.validateGeometry(this.geometry, this.radius); + } + catch (GeometryException e){ + throw new BaseHttpServerVerticle.ValidationException(e.getMessage()); + } + } } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/Step.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/Step.java index 842bcefd6d..91379a5822 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/Step.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/Step.java @@ -83,6 +83,8 @@ public abstract class Step implements Typed, StepExecution { @JsonView({Internal.class, Static.class}) private boolean useSystemInput; @JsonView({Internal.class, Static.class}) + private boolean useSystemOutput; + @JsonView({Internal.class, Static.class}) private Set inputStepIds; /** @@ -445,6 +447,19 @@ public T withUseSystemInput(boolean useSystemInput) { return (T) this; } + public boolean isUseSystemOutput() { + return useSystemOutput; + } + + public void setUseSystemOutput(boolean useSystemOutput) { + this.useSystemOutput = useSystemOutput; + } + + public T withUseSystemOutput(boolean useSystemOutput) { + setUseSystemOutput(useSystemOutput); + return (T) this; + } + public Set getInputStepIds() { return inputStepIds; } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/LambdaBasedStep.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/LambdaBasedStep.java index 9e9af4599a..d2c167ded8 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/LambdaBasedStep.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/LambdaBasedStep.java @@ -48,6 +48,7 @@ import com.here.xyz.jobs.steps.inputs.Input; import com.here.xyz.jobs.util.JobWebClient; import com.here.xyz.util.ARN; +import com.here.xyz.util.runtime.LambdaFunctionRuntime; import com.here.xyz.util.service.aws.SimulatedContext; import com.here.xyz.util.web.XyzWebClient.ErrorResponseException; import com.here.xyz.util.web.XyzWebClient.WebClientException; @@ -75,7 +76,8 @@ import software.amazon.awssdk.services.sfn.model.TaskTimedOutException; @JsonSubTypes({ - @JsonSubTypes.Type(value = DatabaseBasedStep.class) + @JsonSubTypes.Type(value = DatabaseBasedStep.class), + @JsonSubTypes.Type(value = RunEmrJob.class) }) public abstract class LambdaBasedStep extends Step { private static final String TASK_TOKEN_TEMPLATE = 
"$$.Task.Token"; @@ -117,6 +119,7 @@ public abstract class LambdaBasedStep extends Step private void startExecution() throws Exception { updateState(RUNNING); + switch (getExecutionMode()) { case SYNC -> { if (isResume()) @@ -358,6 +361,8 @@ private String truncate(String string, int maxLength) { } private void synchronizeStepState() { + if(isSimulation) + return; //NOTE: For steps that are part of a pipeline job, do not synchronize the state if (isPipeline()) return; @@ -480,6 +485,8 @@ public void handleRequest(InputStream inputStream, OutputStream outputStream, Co //Read the incoming request request = XyzSerializable.deserialize(inputStream, LambdaStepRequest.class); + new LambdaFunctionRuntime(context, request.getStep().getGlobalStepId()); + if (request.getStep() == null) throw new NullPointerException("Malformed step request, missing step definition."); diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/RunEmrJob.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/RunEmrJob.java index 3fee631d47..cbb1c7a52d 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/RunEmrJob.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/RunEmrJob.java @@ -19,13 +19,35 @@ package com.here.xyz.jobs.steps.execution; -import com.here.xyz.jobs.steps.Step; +import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.ExecutionMode.SYNC; + +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.S3ObjectSummary; import com.here.xyz.jobs.steps.inputs.Input; +import com.here.xyz.jobs.steps.outputs.DownloadUrl; import com.here.xyz.jobs.steps.resources.Load; +import com.here.xyz.jobs.util.S3Client; import com.here.xyz.util.service.BaseHttpServerVerticle.ValidationException; +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class RunEmrJob extends LambdaBasedStep { + private static final Logger logger = LogManager.getLogger(); -public class RunEmrJob extends Step { private String applicationId; private String executionRoleArn; private String jarUrl; @@ -53,26 +75,86 @@ public String getDescription() { return "Runs a serverless EMR job on application " + applicationId; } + //Gets only executed when running locally (see GraphTransformer) @Override public void execute() throws Exception { - //NOTE: As this step is just a "configuration holder", this method should never actually be called - throw new RuntimeException("RunEmrJob#execute() was called."); + //Create the local target directory in which EMR writes the output + String localTmpOutputsFolder = createLocalFolder(S3Client.getKeyFromS3Uri(scriptParams.get(1)), true); + + //Download EMR executable JAR from S3 to local + String localJarPath = copyFileFromS3ToLocal(jarUrl); + //Copy step input files from S3 to local /tmp + String localTmpInputsFolder = copyFolderFromS3ToLocal(S3Client.getKeyFromS3Uri(scriptParams.get(0))); + + List localScriptParams = new ArrayList<>(scriptParams); + + localScriptParams.set(0, localTmpInputsFolder); + localScriptParams.set(1, 
+ localScriptParams.set(1, localTmpOutputsFolder); + localScriptParams.add("--local"); + + sparkParams = sparkParams.replace("$localJarPath$", localJarPath); + sparkParams = "java -Xshare:off --add-exports=java.base/java.nio=ALL-UNNAMED " + + "--add-exports=java.base/sun.nio.ch=ALL-UNNAMED " + + "--add-exports=java.base/java.lang.invoke=ALL-UNNAMED " + + "--add-exports=java.base/java.util=ALL-UNNAMED " + + sparkParams; + + List<String> emrParams = new ArrayList<>(List.of(sparkParams.split(" "))); + emrParams.addAll(localScriptParams); + + logger.info("Starting local EMR job with the following params: {}", emrParams); + + ProcessBuilder processBuilder = new ProcessBuilder(emrParams); + //Modify the environment variables of the process to clear any JDWP options + //to avoid -agentlib:jdwp=transport=dt_socket + Map<String, String> env = processBuilder.environment(); + env.remove("_JAVA_OPTIONS"); + env.remove("JAVA_TOOL_OPTIONS"); + + //Combine stdout and stderr + processBuilder.redirectErrorStream(true); + Process process = processBuilder.start(); + + //Capture and log the output of the JAR process + BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); + String line; + + while ((line = reader.readLine()) != null) + logger.info("[EMR-local] {}", line); + + int exitCode = process.waitFor(); + + if (exitCode != 0) + throw new RuntimeException("Local EMR execution failed with exit code " + exitCode + ". Please check the logs."); + + //Upload the EMR result files, which were stored locally + uploadEMRResultsToS3(new File(localTmpOutputsFolder), S3Client.getKeyFromS3Uri(scriptParams.get(1))); } @Override public void resume() throws Exception { //NOTE: As this step is just a "configuration holder", this method should never actually be called - throw new RuntimeException("RunEmrJob#execute() was called."); + throw new RuntimeException("RunEmrJob#resume() was called."); } @Override public void cancel() throws Exception { //NOTE: As this step is just a "configuration holder", this method should never actually be called - throw new RuntimeException("RunEmrJob#execute() was called."); + throw new RuntimeException("RunEmrJob#cancel() was called."); } @Override public boolean validate() throws ValidationException { + if (scriptParams == null) + throw new ValidationException("ScriptParams are mandatory!"); //TODO: Check if this is really needed for *all* EMR jobs (if not, move it to the corresponding sub-class) + if (sparkParams == null) + throw new ValidationException("SparkParams are mandatory!"); //TODO: Check if this is really needed for *all* EMR jobs (if not, move it to the corresponding sub-class) + if (jarUrl == null) + throw new ValidationException("JAR URL is mandatory!"); + //TODO: Move the ScriptParams length check into the corresponding sub-class + if (scriptParams.size() < 2) + throw new ValidationException("ScriptParams length is too small!"); + return !isInputsExpected() || currentInputsCount(Input.class) > 0; } @@ -153,4 +235,114 @@ public RunEmrJob withInputsExpected(boolean inputsExpected) { setInputsExpected(inputsExpected); return this; } + + private String getLocalTmpPath(String s3Path) { + final String localRootPath = "/tmp/"; + return localRootPath + s3Path; + } + + /** + * @param s3Path + * @return Local path of tmp directory + */ + private String copyFileFromS3ToLocal(String s3Path) { + //Lambda allows writing to /tmp folder - Jar file could be bigger than 512MB + try { + logger.info("Copying file '{}' to local.", s3Path); + InputStream jarStream = S3Client.getInstance().streamObjectContent(s3Path); + + //Create local target folder
+ createLocalFolder(Paths.get(s3Path).getParent().toString(), false); + Files.copy(jarStream, Paths.get(getLocalTmpPath(s3Path))); + jarStream.close(); + } catch (FileAlreadyExistsException e) { + logger.info("File: '{}' already exists locally - skip download.", s3Path); + } catch (AmazonS3Exception e) { + throw new RuntimeException("Can't download File: '" + s3Path + "' for local copy!", e); + } catch (IOException e) { + throw new RuntimeException("Can't copy File: '" + s3Path + "'!", e); + } + return getLocalTmpPath(s3Path); + } + + /** + * @param s3Path + * @return Local path of tmp directory + */ + private String copyFolderFromS3ToLocal(String s3Path) { + List<S3ObjectSummary> s3ObjectSummaries = S3Client.getInstance().scanFolder(s3Path); + + for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) { + if (!s3ObjectSummary.getKey().contains("modelBased")) + copyFileFromS3ToLocal(s3ObjectSummary.getKey()); + } + return getLocalTmpPath(s3Path); + } + + private static void deleteDirectory(File directory) { + if (directory.isDirectory()) { + //Get all files and directories within the directory + File[] files = directory.listFiles(); + if (files != null) { + //Recursively delete each file and subdirectory + for (File file : files) { + deleteDirectory(file); + } + } + } + //Delete the directory or file + directory.delete(); + } + + /** + * @param s3Path + * @return The local path of the created directory + * @throws IOException + */ + private String createLocalFolder(String s3Path, boolean deleteBefore) throws IOException { + Path path = Paths.get(getLocalTmpPath(s3Path)); + + //TODO: Use the step ID as prefix within /tmp instead + if (deleteBefore) + deleteDirectory(path.getParent().toFile()); + + Files.createDirectories(path); + + return getLocalTmpPath(s3Path); + } + + private void uploadEMRResultsToS3(File emrOutputDir, String s3TargetPath) throws IOException { + if (emrOutputDir.exists() && emrOutputDir.isDirectory()) { + File[] files = emrOutputDir.listFiles(); + + if (files == null) { + logger.info("EMR job has not produced any files!"); + return; + } + + for (File file : files) { + //TODO: check why this happens + if (file.getPath().endsWith("crc")) + continue; + //TODO: skip _SUCCESS?
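(Spark writes a _SUCCESS marker and .crc checksum files next to the actual part files - both are bookkeeping artifacts rather than payload.)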
+ + logger.info("Store local file {} to {} ", file, s3TargetPath); + //TODO: Check if this is the correct content-type + new DownloadUrl() + .withContentType("text") + .withContent(Files.readAllBytes(file.toPath())) + .store(s3TargetPath + "/" + UUID.randomUUID()); + } + } + } + + @Override + public LambdaBasedStep.AsyncExecutionState getExecutionState() throws LambdaBasedStep.UnknownStateException { + throw new UnknownStateException("RunEmrJob runs in SYNC mode only."); + } + + @Override + public LambdaBasedStep.ExecutionMode getExecutionMode() { + return SYNC; + } } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/Database.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/Database.java index a419200fd1..c97412785c 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/Database.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/Database.java @@ -21,8 +21,8 @@ import static com.here.xyz.jobs.steps.execution.db.Database.DatabaseRole.READER; import static com.here.xyz.jobs.steps.execution.db.Database.DatabaseRole.WRITER; -import static com.here.xyz.util.db.DatabaseSettings.PSQL_HOST; -import static com.here.xyz.util.db.DatabaseSettings.PSQL_REPLICA_HOST; +import static com.here.xyz.util.db.datasource.DatabaseSettings.PSQL_HOST; +import static com.here.xyz.util.db.datasource.DatabaseSettings.PSQL_REPLICA_HOST; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; @@ -32,9 +32,9 @@ import com.here.xyz.models.hub.Connector; import com.here.xyz.util.Hasher; import com.here.xyz.util.db.ConnectorParameters; -import com.here.xyz.util.db.DatabaseSettings; import com.here.xyz.util.db.ECPSTool; import com.here.xyz.util.db.datasource.DataSourceProvider; +import com.here.xyz.util.db.datasource.DatabaseSettings; import com.here.xyz.util.db.datasource.PooledDataSources; import com.here.xyz.util.web.HubWebClient; import com.here.xyz.util.web.HubWebClientAsync; @@ -61,7 +61,7 @@ import software.amazon.awssdk.services.rds.model.DBCluster; public class Database extends ExecutionResource { - private static final List DEFAULT_SEARCH_PATH = List.of("common", "feature_writer"); //TODO: Replace with generated ones by Script tool, once scripts are installed by Step Lambda + private static final String SCRIPT_RESOURCE_PATH = "/sql"; private static final Logger logger = LogManager.getLogger(); private static final float DB_MAX_JOB_UTILIZATION_PERCENTAGE = 0.6f; private static final Pattern RDS_CLUSTER_HOSTNAME_PATTERN = Pattern.compile("(.+).cluster-.*.rds.amazonaws.com.*"); @@ -115,7 +115,7 @@ DatabaseSettings getDatabaseSettings() { if (dbSettings == null) dbSettings = new RestrictedDatabaseSettings(getName(), connectorDbSettingsMap) .withApplicationName("JobFramework") - .withSearchPath(DEFAULT_SEARCH_PATH); + .withScriptResourcePaths(List.of(SCRIPT_RESOURCE_PATH)); dbSettings.setStatementTimeoutSeconds(600); return dbSettings; } @@ -209,7 +209,8 @@ private static List loadDatabasesForConnector(Connector connector) { connectorParameters.getEcps()); fixLocalDbHosts(connectorDbSettingsMap); - DatabaseSettings connectorDbSettings = new DatabaseSettings(connector.id, connectorDbSettingsMap); + DatabaseSettings connectorDbSettings = new DatabaseSettings(connector.id, connectorDbSettingsMap) + .withScriptResourcePaths(List.of(SCRIPT_RESOURCE_PATH)); String rdsClusterId = getClusterIdFromHostname(connectorDbSettings.getHost()); diff --git 
a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/DatabaseBasedStep.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/DatabaseBasedStep.java index 8b6e8c392f..3cc1904793 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/DatabaseBasedStep.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/DatabaseBasedStep.java @@ -47,6 +47,7 @@ @JsonSubTypes.Type(value = SpaceBasedStep.class) }) public abstract class DatabaseBasedStep extends LambdaBasedStep { + private static final Logger logger = LogManager.getLogger(); private double claimedAcuLoad; @JsonView(Internal.class) diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/SingleDatabaseSettings.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/SingleDatabaseSettings.java index d696f029b3..52eb5b4e13 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/SingleDatabaseSettings.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/execution/db/SingleDatabaseSettings.java @@ -20,7 +20,7 @@ package com.here.xyz.jobs.steps.execution.db; import com.here.xyz.jobs.steps.execution.db.Database.DatabaseRole; -import com.here.xyz.util.db.DatabaseSettings; +import com.here.xyz.util.db.datasource.DatabaseSettings; import java.util.Map; public class SingleDatabaseSettings extends DatabaseSettings { diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/DropIndexes.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/DropIndexes.java index 109148f430..9af3f659e2 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/DropIndexes.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/DropIndexes.java @@ -34,6 +34,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -95,6 +96,8 @@ public void execute() throws SQLException, TooManyResourcesClaimed, WebClientExc logger.info("No indices to found. 
None will be dropped for space " + getSpaceId()); } else { + logger.info("[{}] Deactivating the space {} ...", getGlobalStepId(), getSpaceId()); + hubWebClient().patchSpace(getSpaceId(), Map.of("active", false)); logger.info("Dropping the following indices for space " + getSpaceId() + ": " + indexes); List dropQueries = buildSpaceTableDropIndexQueries(getSchema(db), indexes); SQLQuery dropIndexesQuery = SQLQuery.join(dropQueries, ";"); diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/MarkForMaintenance.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/MarkForMaintenance.java index 9c92e876cd..ad0e4ab849 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/MarkForMaintenance.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/MarkForMaintenance.java @@ -31,6 +31,7 @@ import java.sql.SQLException; import java.util.Collections; import java.util.List; +import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -85,6 +86,12 @@ public void execute() throws WebClientException, SQLException, TooManyResourcesC Space space = loadSpace(getSpaceId()); logger.info("Getting storage database for space {}", getSpaceId()); Database db = loadDatabase(space.getStorage().getId(), WRITER); + + if (!space.isActive()) { + logger.info("[{}] Re-activating the space {} ...", getGlobalStepId(), getSpaceId()); + hubWebClient().patchSpace(getSpaceId(), Map.of("active", true)); + } + runReadQueryAsync(buildMarkForMaintenanceQuery(getSchema(db), getRootTableName(space)), db, calculateNeededAcus()); } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/SpaceBasedStep.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/SpaceBasedStep.java index c44639c400..040a20a328 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/SpaceBasedStep.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/SpaceBasedStep.java @@ -19,14 +19,19 @@ package com.here.xyz.jobs.steps.impl; +import static com.here.xyz.jobs.steps.execution.db.Database.DatabaseRole.WRITER; +import static com.here.xyz.jobs.steps.execution.db.Database.loadDatabase; import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.getTableNameFromSpaceParamsOrSpaceId; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonView; import com.here.xyz.events.ContextAwareEvent.SpaceContext; import com.here.xyz.jobs.steps.Config; +import com.here.xyz.jobs.steps.execution.db.Database; import com.here.xyz.jobs.steps.execution.db.DatabaseBasedStep; import com.here.xyz.jobs.steps.impl.transport.CopySpace; +import com.here.xyz.jobs.steps.impl.transport.ExportSpaceToFiles; import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace; import com.here.xyz.models.hub.Space; import com.here.xyz.models.hub.Tag; @@ -40,6 +45,7 @@ @JsonSubTypes({ @JsonSubTypes.Type(value = CreateIndex.class), + @JsonSubTypes.Type(value = ExportSpaceToFiles.class), @JsonSubTypes.Type(value = ImportFilesToSpace.class), @JsonSubTypes.Type(value = DropIndexes.class), @JsonSubTypes.Type(value = AnalyzeSpaceTable.class), @@ -52,6 +58,15 @@ public abstract class SpaceBasedStep extends DatabaseB @JsonView({Internal.class, Static.class}) private String spaceId; + @JsonIgnore + private Database db; + + @JsonIgnore + private Space space; + + @JsonIgnore + protected Space 
superSpace; + public String getSpaceId() { return spaceId; } @@ -65,10 +80,6 @@ public T withSpaceId(String spaceId) { return (T) this; } - protected final String getRootTableName(String spaceId) throws WebClientException { - return getRootTableName(loadSpace(spaceId)); - } - protected final String getRootTableName(Space space) throws WebClientException { return getTableNameFromSpaceParamsOrSpaceId(space.getStorage().getParams(), space.getId(), ConnectorParameters.fromMap(hubWebClient().loadConnector(space.getStorage().getId()).params).isEnableHashedSpaceId()); @@ -106,6 +117,32 @@ protected HubWebClient hubWebClient() { return HubWebClient.getInstance(Config.instance.HUB_ENDPOINT); } + protected Database db() throws WebClientException { + if (db == null) { + logger.info("[{}] Loading database for space {}.", getGlobalStepId(), getSpaceId()); + db = loadDatabase(space().getStorage().getId(), WRITER); + } + return db; + } + + protected Space space() throws WebClientException { + if (space == null) { + logger.info("[{}] Loading space config for space {}.", getGlobalStepId(), getSpaceId()); + space = loadSpace(getSpaceId()); + } + return space; + } + + protected Space superSpace() throws WebClientException { + if (superSpace == null) { + logger.info("[{}] Loading space config for super-space {}.", getGlobalStepId(), getSpaceId()); + if (space().getExtension() == null) + throw new IllegalStateException("The space does not extend some other space. Could not load the super space."); + superSpace = loadSpace(space().getExtension().getSpaceId()); + } + return superSpace; + } + @Override public boolean validate() throws ValidationException { validateSpaceExists(); diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/tools/ResourceAndTimeCalculator.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/tools/ResourceAndTimeCalculator.java index 384ee08469..8f819c9564 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/tools/ResourceAndTimeCalculator.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/tools/ResourceAndTimeCalculator.java @@ -52,21 +52,43 @@ public boolean chooseMe() { } } + //Export Related... + public double calculateNeededExportAcus(long uncompressedUploadBytesEstimation) { + //Maximum of ACUs - to prevent that the job never gets executed. TODO: check how to deal with the maxUnits of the DB + final double maxAcus = 70; + //Exports are not as heavy as imports + final double exportQuotient = 2; + + //Calculate the needed ACUs + double neededAcus = calculateNeededAcusFromByteSize(uncompressedUploadBytesEstimation) / exportQuotient; + return Math.min(neededAcus, maxAcus); + } + + protected double exportTimeFactor(String spaceId, double seconds){ + return seconds; + } + + public int calculateExportTimeInSeconds(String spaceId, long byteSize){ + int warmUpTime = 10; + int bytesPerSecond = 57 * 1024 * 1024; + + return (int)(warmUpTime + exportTimeFactor(spaceId, ((double) byteSize / bytesPerSecond))); + } + //Import Related... 
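As a worked illustration of the export estimation just defined (the import-related estimators continue below), the following is a minimal, self-contained sketch. The 70-ACU cap, the export quotient of 2 and the 57 MiB/s throughput are taken from the hunk above; GB_TO_BYTES and ACU_RAM are referenced but their values are not visible in this diff, so the values below (1 GiB, and 2 GB of RAM per ACU) are illustrative assumptions only, as is the class itself.

public class ExportEstimationSketch {
  static final double GB_TO_BYTES = 1024 * 1024 * 1024; //Assumption: bytes per GB
  static final double ACU_RAM = 2;                      //Assumption: GBs of RAM per ACU

  static double calculateNeededExportAcus(long byteSize) {
    final double maxAcus = 70;       //Cap from the hunk above, so a job never becomes unschedulable
    final double exportQuotient = 2; //Exports are roughly half as expensive as imports
    double neededAcus = byteSize / GB_TO_BYTES / ACU_RAM / exportQuotient;
    return Math.min(neededAcus, maxAcus);
  }

  static int calculateExportTimeInSeconds(long byteSize) {
    int warmUpTime = 10;                   //Seconds, from the hunk above
    int bytesPerSecond = 57 * 1024 * 1024; //Assumed sustained DB-to-S3 throughput, from the hunk above
    return (int) (warmUpTime + (double) byteSize / bytesPerSecond);
  }

  public static void main(String[] args) {
    long tenGigabytes = 10L * 1024 * 1024 * 1024;
    //10 GB / 2 GB RAM per ACU / quotient 2 => 2.5 ACUs, well below the 70-ACU cap
    System.out.println(calculateNeededExportAcus(tenGigabytes));
    //10s warm-up + 10240 MiB at 57 MiB/s => roughly 190 seconds
    System.out.println(calculateExportTimeInSeconds(tenGigabytes));
  }
}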
protected double importTimeFactor(String spaceId, double bytesPerBillion){ return 0.44 * bytesPerBillion; } public int calculateImportTimeInSeconds(String spaceId, long byteSize, LambdaBasedStep.ExecutionMode executionMode){ - if(executionMode.equals(LambdaBasedStep.ExecutionMode.ASYNC)) { - int warmUpTime = 10; - double bytesPerBillion = byteSize / 1_000_000_000d; - return (int) (warmUpTime + importTimeFactor(spaceId, bytesPerBillion) * 60); - }else{ - int expectedHubThroughPutBytesPerSec = 800_000; - int overhead = 2; - return (int) (byteSize / expectedHubThroughPutBytesPerSec * overhead); + int warmUpTime = 10; + double bytesPerBillion = byteSize / 1_000_000_000d; + int totalTime = (int)(warmUpTime + importTimeFactor(spaceId, bytesPerBillion) * 60); + + if(executionMode.equals(LambdaBasedStep.ExecutionMode.SYNC)) { + totalTime *= 2; } + return totalTime; } public int calculateImportTimeoutSeconds(String spaceId, long byteSize, LambdaBasedStep.ExecutionMode executionMode) { @@ -87,7 +109,7 @@ public double calculateNeededImportAcus(long uncompressedUploadBytesEstimation, //Calculate the needed ACUs double neededAcus = threadCount * calculateNeededAcusFromByteSize(bytesPerThreads); - return neededAcus > maxAcus ? maxAcus : neededAcus; + return Math.min(neededAcus, maxAcus); } public int calculateNeededImportDBThreadCount(long uncompressedUploadBytesEstimation, int fileCount, int maxDbThreadCount) { @@ -103,7 +125,7 @@ public int calculateNeededImportDBThreadCount(long uncompressedUploadBytesEstima calculatedThreadCount = threadCnt == 0 ? 1 : threadCnt; } - return calculatedThreadCount > fileCount ? fileCount : calculatedThreadCount; + return Math.min(calculatedThreadCount, fileCount); } //Copy Related... @@ -118,7 +140,7 @@ public double calculateNeededAcusFromByteSize(long byteSize) { double requiredRAM = byteSize / GB_TO_BYTES; double neededAcus = requiredRAM / ACU_RAM; - return neededAcus > maxAcus ? maxAcus : neededAcus; + return Math.min(neededAcus, maxAcus); } //Index Related... @@ -172,7 +194,7 @@ private static double interpolate(double globalMax, double max, long real, doubl return max; else{ double interpolated = (real / globalMax) * max; - return interpolated < min ? 
min : interpolated; + return Math.max(interpolated, min); } } } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ExportSpaceToFiles.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ExportSpaceToFiles.java index d15d2cab98..b14666e8f8 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ExportSpaceToFiles.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ExportSpaceToFiles.java @@ -19,53 +19,97 @@ package com.here.xyz.jobs.steps.impl.transport; +import static com.here.xyz.events.ContextAwareEvent.SpaceContext.EXTENSION; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.JOB_EXECUTOR; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.STEP_EXECUTE; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.STEP_ON_ASYNC_SUCCESS; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.STEP_ON_STATE_CHECK; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildProgressQuery; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildResetSuccessMarkerAndRunningOnesStatement; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildTemporaryJobTableCreateStatement; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildTemporaryJobTableDropStatement; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildTemporaryJobTableInsertStatements; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.createQueryContext; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.errorLog; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.getTemporaryJobTableName; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.infoLog; +import static com.here.xyz.util.web.XyzWebClient.WebClientException; + import com.fasterxml.jackson.annotation.JsonView; -import com.here.xyz.events.ContextAwareEvent; -import com.here.xyz.jobs.steps.execution.db.Database; +import com.here.xyz.events.ContextAwareEvent.SpaceContext; +import com.here.xyz.events.PropertiesQuery; +import com.here.xyz.jobs.datasets.filters.SpatialFilter; +import com.here.xyz.jobs.steps.S3DataFile; import com.here.xyz.jobs.steps.impl.SpaceBasedStep; +import com.here.xyz.jobs.steps.impl.tools.ResourceAndTimeCalculator; +import com.here.xyz.jobs.steps.outputs.DownloadUrl; import com.here.xyz.jobs.steps.outputs.FeatureStatistics; +import com.here.xyz.jobs.steps.outputs.FileStatistics; +import com.here.xyz.jobs.steps.resources.IOResource; import com.here.xyz.jobs.steps.resources.Load; -import com.here.xyz.models.geojson.implementation.Geometry; -import com.here.xyz.models.hub.Space; +import com.here.xyz.jobs.steps.resources.TooManyResourcesClaimed; +import com.here.xyz.models.hub.Ref; +import com.here.xyz.psql.query.GetFeaturesByGeometryBuilder; +import com.here.xyz.psql.query.GetFeaturesByGeometryBuilder.GetFeaturesByGeometryInput; +import com.here.xyz.psql.query.QueryBuilder.QueryBuildingException; import com.here.xyz.responses.StatisticsResponse; import com.here.xyz.util.db.SQLQuery; import com.here.xyz.util.service.BaseHttpServerVerticle.ValidationException; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; - -import static 
com.here.xyz.events.ContextAwareEvent.SpaceContext.EXTENSION; -import static com.here.xyz.jobs.steps.execution.db.Database.DatabaseRole.WRITER; -import static com.here.xyz.jobs.steps.execution.db.Database.loadDatabase; -import static com.here.xyz.util.web.XyzWebClient.WebClientException; +import java.util.Map; +import java.util.UUID; /** * This step exports the data of a specified space into a set of files. * This step produces exactly one output of type {@link FileStatistics}. */ public class ExportSpaceToFiles extends SpaceBasedStep { - private static final Logger logger = LogManager.getLogger(); + //Defines how many features a source layer needs to have to start parallelization. + public static final int PARALLELIZTATION_MIN_THRESHOLD = 10;//TODO: put back to 500k + //Defines how many export threads are used + public static final int PARALLELIZTATION_THREAD_COUNT = 8; - private Format format = Format.GEOJSON; - private Phase phase; + @JsonView({Internal.class, Static.class}) + private int calculatedThreadCount = -1; - //Geometry-Filters - private Geometry geometry; - private int radius = -1; - private boolean clipOnFilterGeometry; + @JsonView({Internal.class, Static.class}) + private double overallNeededAcus = -1; - //Content-Filters - private String propertyFilter; - private ContextAwareEvent.SpaceContext context; - private String targetVersion; + @JsonView({Internal.class, Static.class}) + private int estimatedSeconds = -1; - //Partitioning - private String partitionKey; - //Required if partitionKey=tileId - private Integer targetLevel; - private boolean clipOnPartitions; + @JsonView({Internal.class, Static.class}) + private boolean addStatisticsToUserOutput = true; + + private Format format = Format.GEOJSON; + + private SpatialFilter spatialFilter; + private PropertiesQuery propertyFilter; + private SpaceContext context; + + private Ref versionRef; + + /** + * TODO: + * Spatial-Filters + * DONE + * + * Content-Filters + * DONE private String propertyFilter; + * DONE private SpaceContext context; + * ? private String targetVersion; + * + * Version Filter: + * DONE private VersionRef versionRef; + * + * Partitioning - part of EMR? + * private String partitionKey; + * --Required if partitionKey=tileId + * private Integer targetLevel; + * private boolean clipOnPartitions; + */ public enum Format { CSV_JSON_WKB, @@ -73,21 +117,69 @@ public enum Format { GEOJSON; } - public enum Phase { - VALIDATE + public SpatialFilter getSpatialFilter() { + return spatialFilter; + } + + public void setSpatialFilter(SpatialFilter spatialFilter) { + this.spatialFilter = spatialFilter; + } + + public ExportSpaceToFiles withSpatialFilter(SpatialFilter spatialFilter) { + setSpatialFilter(spatialFilter); + return this; + } + + public PropertiesQuery getPropertyFilter() { + return propertyFilter; } - public void setFormat(Format format) { - this.format = format; + public void setPropertyFilter(PropertiesQuery propertyFilter) { + this.propertyFilter = propertyFilter; } - public ExportSpaceToFiles withFormat(Format format) { - setFormat(format); + public ExportSpaceToFiles withPropertyFilter(PropertiesQuery propertyFilter){ + setPropertyFilter(propertyFilter); return this; } - public Phase getPhase() { - return phase; + public SpaceContext getContext() { + return context == null ? 
EXTENSION :context; + } + + public void setContext(SpaceContext context) { + this.context = context; + } + + public ExportSpaceToFiles withContext(SpaceContext context) { + setContext(context); + return this; + } + + public Ref getVersionRef() { + return versionRef; + } + + public void setVersionRef(Ref versionRef) { + this.versionRef = versionRef; + } + + public ExportSpaceToFiles withVersionRef(Ref versionRef) { + setVersionRef(versionRef); + return this; + } + + public boolean isAddStatisticsToUserOutput() { + return addStatisticsToUserOutput; + } + + public void setAddStatisticsToUserOutput(boolean addStatisticsToUserOutput) { + this.addStatisticsToUserOutput = addStatisticsToUserOutput; + } + + public ExportSpaceToFiles withAddStatisticsToUserOutput(boolean addStatisticsToUserOutput) { + setAddStatisticsToUserOutput(addStatisticsToUserOutput); + return this; } @JsonView({Internal.class, Static.class}) @@ -95,17 +187,34 @@ public Phase getPhase() { @Override public List getNeededResources() { - return List.of(); + try { + statistics = statistics != null ? statistics : loadSpaceStatistics(getSpaceId(), context); + overallNeededAcus = overallNeededAcus != -1 ? + overallNeededAcus : ResourceAndTimeCalculator.getInstance().calculateNeededExportAcus(statistics.getDataSize().getValue()); + + infoLog(JOB_EXECUTOR, this,"Calculated ACUS: byteSize of layer: " + + statistics.getDataSize().getValue() + " => neededACUs:" + overallNeededAcus); + + return List.of(new Load().withResource(db()).withEstimatedVirtualUnits(overallNeededAcus), + new Load().withResource(IOResource.getInstance()).withEstimatedVirtualUnits(getUncompressedUploadBytesEstimation())); + }catch (Exception e){ + throw new RuntimeException(e); + } } @Override public int getTimeoutSeconds() { - return 0; + return 24 * 3600; } @Override public int getEstimatedExecutionSeconds() { - return 0; + if (estimatedSeconds == -1 && getSpaceId() != null) { + estimatedSeconds = ResourceAndTimeCalculator.getInstance() + .calculateExportTimeInSeconds(getSpaceId(), getUncompressedUploadBytesEstimation()); + infoLog(JOB_EXECUTOR, this,"Calculated estimatedSeconds: "+estimatedSeconds ); + } + return estimatedSeconds; } @Override @@ -113,22 +222,51 @@ public String getDescription() { return "Export data from space " + getSpaceId(); } + @Override + public ExecutionMode getExecutionMode() { + return ExecutionMode.ASYNC; + } + @Override public boolean validate() throws ValidationException { super.validate(); + try { - logger.info("VALIDATE"); - loadSpace(getSpaceId()); - statistics = loadSpaceStatistics(getSpaceId(), EXTENSION); - long featureCount = statistics.getCount().getValue(); - - /** - * @TODO: - * - Check if geometry is valid - * - Check searchableProperties - * - Check if targetVersion is valid - * - Check if targetLevel is valid - */ + statistics = statistics != null ? statistics : loadSpaceStatistics(getSpaceId(), context); + + //Validate input Geometry + if(this.spatialFilter != null) + this.spatialFilter.validateSpatialFilter(); + + //Validate versionRef + if(this.versionRef == null) + return true; + + Long minSpaceVersion = statistics.getMinVersion().getValue(); + Long maxSpaceVersion = statistics.getMaxVersion().getValue(); + + if(this.versionRef.isSingleVersion()){ + if(this.versionRef.getVersion() < minSpaceVersion) + throw new ValidationException("Invalid VersionRef! 
Version is smaller than min available version '"+ + minSpaceVersion+"'!"); + if(this.versionRef.getVersion() > maxSpaceVersion) + throw new ValidationException("Invalid VersionRef! Version is higher than max available version '"+ + maxSpaceVersion+"'!"); + }else if(this.versionRef.isRange()){ + if(this.versionRef.getStartVersion() < minSpaceVersion) + throw new ValidationException("Invalid VersionRef! StartVersion is smaller than min available version '"+ + minSpaceVersion+"'!"); + if(this.versionRef.getEndVersion() > maxSpaceVersion) + throw new ValidationException("Invalid VersionRef! EndVersion is higher than max available version '"+ + maxSpaceVersion+"'!"); + } + + + //TODO: Check if property validation is needed - in the sense of searchableProperties +// if(statistics.getCount().getValue() > 1_000_000 && getPropertyFilter() != null){ +// getPropertyFilter().getQueryKeys() +// throw new ValidationException("is not a searchable property"); +// } } catch (WebClientException e) { throw new ValidationException("Error loading resource " + getSpaceId(), e); @@ -138,73 +276,161 @@ public boolean validate() throws ValidationException { @Override public void execute() throws Exception { - logger.info("EXECUTE"); - logger.info( "Loading space config for space "+getSpaceId()); - Space space = loadSpace(getSpaceId()); - logger.info("Getting storage database for space "+getSpaceId()); - Database db = loadDatabase(space.getStorage().getId(), WRITER); + statistics = statistics != null ? statistics : loadSpaceStatistics(getSpaceId(), context); + calculatedThreadCount = (statistics.getCount().getValue() > PARALLELIZTATION_MIN_THRESHOLD) ? PARALLELIZTATION_THREAD_COUNT : 1; + + List s3FileNames = generateS3FileNames(calculatedThreadCount); + createAndFillTemporaryJobTable(s3FileNames); + + for (int i = 0; i < calculatedThreadCount; i++) { + infoLog(STEP_EXECUTE, this,"Start export thread number: " + i ); + runReadQueryAsync(buildExportQuery(i), db(), 0,false); + } } @Override public void resume() throws Exception { + //TODO + } + @Override + protected void onAsyncSuccess() throws Exception { + //TODO + super.onAsyncSuccess(); + + FileStatistics statistics = runReadQuerySync(buildStatisticDataOfTemporaryTableQuery(), db(), + 0, rs -> rs.next() + ? 
new FileStatistics() + .withBytesExported(rs.getLong("bytes_uploaded")) + .withRowsExported(rs.getLong("rows_uploaded")) + .withFilesCreated(rs.getInt("files_uploaded")) + : new FileStatistics()); + + infoLog(STEP_ON_ASYNC_SUCCESS, this,"Job Statistics: bytes=" + statistics.getExportedBytes() + " files=" + statistics.getExportedFiles()); + if(addStatisticsToUserOutput) + registerOutputs(List.of(statistics), true); + + infoLog(STEP_ON_ASYNC_SUCCESS, this,"Cleanup temporary table"); + runWriteQuerySync(buildTemporaryJobTableDropStatement(getSchema(db()), getTemporaryJobTableName(getId())), db(), 0); } - private SQLQuery buildTemporaryTableForImportQuery(String schema) { - return new SQLQuery(""" - CREATE TABLE IF NOT EXISTS ${schema}.${table} - ( - s3_bucket text NOT NULL, - s3_path text NOT NULL, - s3_region text NOT NULL, - content_query text, --tileId/s3_path - state text NOT NULL, --jobtype - execution_count int DEFAULT 0, --amount of retries - data jsonb COMPRESSION lz4, --statistic data //getRowsUploaded getFilesUploaded getBytesUploaded - i SERIAL, - CONSTRAINT ${primaryKey} PRIMARY KEY (s3_path) - ); - """) - .withVariable("table", TransportTools.getTemporaryJobTableName(this)) - .withVariable("schema", schema) - .withVariable("primaryKey", TransportTools.getTemporaryJobTableName(this) + "_primKey"); - } - - private SQLQuery generateFilteredExportQuery( - SQLQuery customWhereCondition, - boolean isForCompositeContentDetection, - String partitionKey, - Boolean omitOnNull ) - throws SQLException { - - return null; - } - - public SQLQuery buildS3ExportQuery(String s3Bucket, String s3Path, String s3FilePrefix, String s3Region) { - s3Path = s3Path+ "/" +(s3FilePrefix == null ? "" : s3FilePrefix)+"export"; - - SQLQuery exportSelectString = new SQLQuery(""); - String exportOptions = ""; - - if(format.equals(Format.GEOJSON)){ - exportOptions = " 'FORMAT TEXT, ENCODING ''UTF8'' '"; - s3Path += ".geojson"; - }else { - exportOptions = "'format csv,delimiter '','', encoding ''UTF8'', quote ''\"'', escape '''''''' '"; - s3Path += ".csv"; + @Override + protected boolean onAsyncFailure() { + //TODO + return super.onAsyncFailure(); + } + + @Override + protected void onStateCheck() { + try { + runReadQuerySync(buildProgressQuery(getSchema(db()), this), db(), 0, + rs -> { + rs.next(); + + float progress = rs.getFloat("progress"); + long processedBytes = rs.getLong("processed_bytes"); + int finishedCnt = rs.getInt("finished_cnt"); + int failedCnt = rs.getInt("failed_cnt"); + + getStatus().setEstimatedProgress(progress); + + infoLog(STEP_ON_STATE_CHECK,this,"Progress[" + progress + "] => " + " processedBytes:" + + processedBytes + " ,finishedCnt:" + finishedCnt + " ,failedCnt:" + failedCnt); + return progress; + }); + } + catch (Exception e) { + //TODO: What to do? Only log? Reporting the status is not that important. Further, ignore the "table does not exist" error - report 0 in this case. 
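+        //One possible shape for that (a hedged sketch, not part of this patch): swallow the
+        //"undefined_table" case the same way ImportFilesToSpace handles SQLState 42P01 further below,
+        //and report zero progress instead of logging an error:
+        //  if (e instanceof SQLException sqlException && "42P01".equals(sqlException.getSQLState()))
+        //    getStatus().setEstimatedProgress(0f);
+        //  else fall through to the errorLog() call below.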
+ errorLog(STEP_ON_STATE_CHECK, this, e); + } + } + + private List generateS3FileNames(int cnt){ + List urlList = new ArrayList<>(); + + for (int i = 1; i <= calculatedThreadCount; i++) { + urlList.add(new DownloadUrl().withS3Key(outputS3Prefix(!isUseSystemOutput(),false) + "/" + i + "/" + UUID.randomUUID())); + } + + return urlList; + } + + private void createAndFillTemporaryJobTable(List s3FileNames) throws SQLException, TooManyResourcesClaimed, WebClientException { + if (isResume()) { + infoLog(STEP_EXECUTE, this,"Reset SuccessMarker"); + runWriteQuerySync(buildResetSuccessMarkerAndRunningOnesStatement(getSchema(db()) ,this), db(), 0); } + else { + infoLog(STEP_EXECUTE, this,"Create temporary job table"); + runWriteQuerySync(buildTemporaryJobTableCreateStatement(getSchema(db()), this), db(), 0); - SQLQuery q = new SQLQuery("SELECT * from aws_s3.query_export_to_s3("+ - " ${{exportSelectString}},"+ - " aws_commons.create_s3_uri(#{s3Bucket}, #{s3Path}, #{s3Region}),"+ - " options := "+exportOptions+");" + infoLog(STEP_EXECUTE, this,"Fill temporary job table"); + runBatchWriteQuerySync(SQLQuery.batchOf(buildTemporaryJobTableInsertStatements(getSchema(db()), + s3FileNames, bucketRegion(),this)), db(), 0 ); + } + } + + private String generateFilteredExportQuery(int threadNumber) throws WebClientException, TooManyResourcesClaimed, QueryBuildingException { + GetFeaturesByGeometryBuilder queryBuilder = new GetFeaturesByGeometryBuilder() + .withDataSourceProvider(requestResource(db(), 0)); + + GetFeaturesByGeometryInput input = new GetFeaturesByGeometryInput( + getSpaceId(), + context == null ? EXTENSION : context, + space().getVersionsToKeep(), + versionRef, + spatialFilter != null ? spatialFilter.getGeometry() : null, + spatialFilter != null ? spatialFilter.getRadius() : 0, + spatialFilter != null && spatialFilter.isClip(), + propertyFilter ); - q.setQueryFragment("exportSelectString", exportSelectString); - q.setNamedParameter("s3Bucket",s3Bucket); - q.setNamedParameter("s3Path",s3Path); - q.setNamedParameter("s3Region",s3Region); + SQLQuery threadCondition = new SQLQuery("i % #{threadCount} = #{threadNumber}") + .withNamedParameter("threadCount", calculatedThreadCount) + .withNamedParameter("threadNumber", threadNumber); + + return queryBuilder + .withAdditionalFilterFragment(threadCondition) + .buildQuery(input) + .toExecutableQueryString(); + } + + public SQLQuery buildExportQuery(int threadNumber) throws WebClientException, TooManyResourcesClaimed, + QueryBuildingException { + String exportSelectString = generateFilteredExportQuery(threadNumber); + + SQLQuery successQuery = buildSuccessCallbackQuery(); + SQLQuery failureQuery = buildFailureCallbackQuery(); + + return new SQLQuery( + "CALL execute_transfer(#{format}, '${{successQuery}}', '${{failureQuery}}', #{contentQuery});") + .withContext(getQueryContext()) + .withAsyncProcedure(true) + .withNamedParameter("format", format.toString()) + .withQueryFragment("successQuery", successQuery.substitute().text().replaceAll("'", "''")) + .withQueryFragment("failureQuery", failureQuery.substitute().text().replaceAll("'", "''")) + .withNamedParameter("contentQuery", exportSelectString); + } + + private SQLQuery buildStatisticDataOfTemporaryTableQuery() throws WebClientException { + return new SQLQuery(""" + SELECT sum((data->'export_statistics'->'rows_uploaded')::bigint) as rows_uploaded, + sum(CASE + WHEN (data->'export_statistics'->'bytes_uploaded')::bigint > 0 + THEN (data->'export_statistics'->'files_uploaded')::bigint + ELSE 0 + END) as 
files_uploaded, + sum((data->'export_statistics'->'bytes_uploaded')::bigint) as bytes_uploaded + FROM ${schema}.${tmpTable} + WHERE POSITION('SUCCESS_MARKER' in state) = 0; + """) + .withVariable("schema", getSchema(db())) + .withVariable("tmpTable", getTemporaryJobTableName(getId())) + .withVariable("triggerTable", TransportTools.getTemporaryTriggerTableName(getId())); + } - return q; + private Map getQueryContext() throws WebClientException { + String superTable = space().getExtension() != null ? getRootTableName(superSpace()) : null; + return createQueryContext(getId(), getSchema(db()), getRootTableName(space()), (space().getVersionsToKeep() > 1), superTable); } } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesToSpace.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesToSpace.java index ea10ddf59a..da63ce4e0b 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesToSpace.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesToSpace.java @@ -23,29 +23,34 @@ import static com.here.xyz.jobs.datasets.space.UpdateStrategy.DEFAULT_UPDATE_STRATEGY; import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.ExecutionMode.ASYNC; import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.ExecutionMode.SYNC; -import static com.here.xyz.jobs.steps.execution.db.Database.DatabaseRole.WRITER; -import static com.here.xyz.jobs.steps.execution.db.Database.loadDatabase; import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.EntityPerLine.Feature; import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.EntityPerLine.FeatureCollection; import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Format.CSV_GEOJSON; import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Format.CSV_JSON_WKB; import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Format.GEOJSON; -import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Phase.CREATE_TMP_TABLE; -import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Phase.EXECUTE_IMPORT; -import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Phase.FILL_TMP_TABLE; -import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Phase.RESET_SUCCESS_MARKER; -import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Phase.RETRIEVE_NEW_VERSION; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.JOB_EXECUTOR; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.JOB_VALIDATE; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.STEP_EXECUTE; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.STEP_ON_ASYNC_SUCCESS; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.Phase.STEP_ON_STATE_CHECK; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildTemporaryJobTableDropStatement; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildTemporaryJobTableInsertStatements; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildProgressQuery; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildResetSuccessMarkerAndRunningOnesStatement; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.buildTemporaryJobTableCreateStatement; +import static 
com.here.xyz.jobs.steps.impl.transport.TransportTools.createQueryContext; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.errorLog; import static com.here.xyz.jobs.steps.impl.transport.TransportTools.getTemporaryJobTableName; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.getTemporaryTriggerTableName; +import static com.here.xyz.jobs.steps.impl.transport.TransportTools.infoLog; import static com.here.xyz.util.web.XyzWebClient.WebClientException; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonView; import com.here.xyz.jobs.datasets.space.UpdateStrategy; import com.here.xyz.jobs.steps.S3DataFile; -import com.here.xyz.jobs.steps.execution.db.Database; import com.here.xyz.jobs.steps.impl.SpaceBasedStep; import com.here.xyz.jobs.steps.impl.tools.ResourceAndTimeCalculator; -import com.here.xyz.jobs.steps.inputs.Input; +import com.here.xyz.jobs.steps.impl.transport.tools.ImportFilesQuickValidator; import com.here.xyz.jobs.steps.inputs.UploadUrl; import com.here.xyz.jobs.steps.outputs.FeatureStatistics; import com.here.xyz.jobs.steps.resources.IOResource; @@ -57,19 +62,14 @@ import com.here.xyz.util.db.SQLQuery; import com.here.xyz.util.service.BaseHttpServerVerticle.ValidationException; import com.here.xyz.util.service.Core; -import io.vertx.core.json.JsonObject; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.locationtech.jts.io.ParseException; @@ -79,8 +79,7 @@ */ public class ImportFilesToSpace extends SpaceBasedStep { - private static final Logger logger = LogManager.getLogger(); - private static final long MAX_INPUT_BYTES_FOR_NON_EMPTY_IMPORT = 10 * 1024 * 1024 * 1024l; + private static final long MAX_INPUT_BYTES_FOR_NON_EMPTY_IMPORT = 10 * 1024 * 1024 * 1024; private static final long MAX_INPUT_BYTES_FOR_SYNC_IMPORT = 100 * 1024 * 1024; private static final long MAX_INPUT_BYTES_FOR_KEEP_INDICES = 1 * 1024 * 1024 * 1024; private static final int MIN_FEATURE_COUNT_IN_TARGET_TABLE_FOR_KEEP_INDICES = 5_000_000; @@ -88,8 +87,6 @@ public class ImportFilesToSpace extends SpaceBasedStep { private Format format = GEOJSON; - private Phase phase; - @JsonView({Internal.class, Static.class}) private double overallNeededAcus = -1; @@ -111,13 +108,6 @@ public class ImportFilesToSpace extends SpaceBasedStep { @JsonView({Internal.class, Static.class}) private EntityPerLine entityPerLine = Feature; - @JsonIgnore - private Space space; - @JsonIgnore - private Space superSpace; - @JsonIgnore - private Database db; - public Format getFormat() { return format; } @@ -222,7 +212,7 @@ public int getEstimatedExecutionSeconds() { if (estimatedSeconds == -1 && getSpaceId() != null) { estimatedSeconds = ResourceAndTimeCalculator.getInstance() .calculateImportTimeInSeconds(getSpaceId(), getUncompressedUploadBytesEstimation(), getExecutionMode()); - logger.info("[{}] Import estimatedSeconds {}", getGlobalStepId(), estimatedSeconds); + infoLog(JOB_EXECUTOR, this, "Calculated estimatedSeconds: "+estimatedSeconds ); } return estimatedSeconds; } @@ -257,7 +247,7 @@ public void deleteOutputs() { public boolean validate() throws ValidationException { super.validate(); try { - 
logAndSetPhase(Phase.VALIDATE); + infoLog(JOB_VALIDATE, this); //Check if the space is actually existing Space space = space(); if(space.isReadOnly()) @@ -286,28 +276,6 @@ public boolean validate() throws ValidationException { return true; } - private void logAndSetPhase(Phase newPhase, String... messages) { - if (newPhase != null) - phase = newPhase; - logger.info("[{}@{}] ON/INTO '{}' {}", getGlobalStepId(), getPhase(), getSpaceId(), messages.length > 0 ? messages : ""); - } - - private Space space() throws WebClientException { - if (space == null) { - log("Loading space config for space " + getSpaceId()); - space = loadSpace(getSpaceId()); - } - return space; - } - - public Phase getPhase() { - return phase; - } - - private void log(String... messages) { - logAndSetPhase(null, messages); - } - @Override public void execute() throws WebClientException, SQLException, TooManyResourcesClaimed, IOException, ParseException, InterruptedException { @@ -318,19 +286,17 @@ private void _execute(boolean isResume) throws WebClientException, SQLException, if (getExecutionMode() == SYNC) syncExecution(); else { - log("Importing input files for job " + getJobId() + " into space " + getSpaceId() + " ..."); - //TODO: Move resume logic into #resume() if (!isResume) { - logAndSetPhase(Phase.SET_READONLY); + infoLog(STEP_EXECUTE, this, "Set ReadOnly"); hubWebClient().patchSpace(getSpaceId(), Map.of("readOnly", true)); - logAndSetPhase(RETRIEVE_NEW_VERSION); + infoLog(STEP_EXECUTE, this,"Retrieve new version"); long newVersion = increaseVersionSequence(); - logAndSetPhase(Phase.CREATE_TRIGGER); //FIXME: Use owner of the job + infoLog(STEP_EXECUTE, this,"Create TriggerTable and Trigger"); //Create Temp-ImportTable to avoid deserialization of JSON and fix missing row count - runBatchWriteQuerySync(buildTemporaryTriggerTableBlock(space.getOwner(), newVersion), db(), 0); + runBatchWriteQuerySync(buildTemporaryTriggerTableBlock(space().getOwner(), newVersion), db(), 0); } createAndFillTemporaryJobTable(); @@ -340,23 +306,21 @@ private void _execute(boolean isResume) throws WebClientException, SQLException, MAX_DB_THREAD_COUNT); double neededAcusForOneThread = calculateNeededAcus(1); - logAndSetPhase(EXECUTE_IMPORT); - for (int i = 1; i <= calculatedThreadCount; i++) { - logAndSetPhase(EXECUTE_IMPORT, "Start Import Thread number " + i); - runReadQueryAsync(buildImportQueryBlock(), db(), neededAcusForOneThread, false); + infoLog(STEP_EXECUTE, this,"Start Import Thread number " + i); + runReadQueryAsync(buildImportQuery(), db(), neededAcusForOneThread, false); } } } private void syncExecution() throws WebClientException, SQLException, TooManyResourcesClaimed, IOException { //TODO: Support resume - logAndSetPhase(RETRIEVE_NEW_VERSION); + infoLog(STEP_EXECUTE, this,"Retrieve new version"); long newVersion = increaseVersionSequence(); long featureCount = 0; for (S3DataFile input : loadStepInputs()) { - logger.info("[{}] Sync write from {} to {}", getGlobalStepId(), input.getS3Key(), getSpaceId()); + infoLog(STEP_EXECUTE, this,"Sync write of file:"+ input.getS3Key()); featureCount += syncWriteFileToSpace(input, newVersion); } registerOutputs(List.of(new FeatureStatistics().withFeatureCount(featureCount).withByteSize(getUncompressedUploadBytesEstimation())), @@ -404,42 +368,25 @@ private long increaseVersionSequence() throws SQLException, TooManyResourcesClai }); } - private Space superSpace() throws WebClientException { - if (superSpace == null) { - log("Loading space config for super-space " + getSpaceId()); - if 
(space().getExtension() == null) - throw new IllegalStateException("The space does not extend some other space. Could not load the super space."); - superSpace = loadSpace(space().getExtension().getSpaceId()); - } - return superSpace; - } - - private Database db() throws WebClientException { - if (db == null) { - log("Loading storage database for space " + getSpaceId()); - db = loadDatabase(space().getStorage().getId(), WRITER); - } - return db; - } - private void createAndFillTemporaryJobTable() throws SQLException, TooManyResourcesClaimed, WebClientException { if (isResume()) { - logAndSetPhase(RESET_SUCCESS_MARKER); - runWriteQuerySync(resetSuccessMarkerAndRunningOnes(getSchema(db)), db, 0); + infoLog(STEP_EXECUTE, this,"Reset SuccessMarker"); + runWriteQuerySync(buildResetSuccessMarkerAndRunningOnesStatement(getSchema(db()) ,this), db(), 0); } else { - logAndSetPhase(CREATE_TMP_TABLE); - runWriteQuerySync(buildTemporaryTableForImportQuery(getSchema(db)), db, 0); + infoLog(STEP_EXECUTE, this,"Create temporary job table"); + runWriteQuerySync(buildTemporaryJobTableCreateStatement(getSchema(db()), this), db(), 0); - logAndSetPhase(FILL_TMP_TABLE); - fillTemporaryTableWithInputs(db, loadStepInputs(), bucketRegion()); + infoLog(STEP_EXECUTE, this,"Fill temporary job table"); + runBatchWriteQuerySync(SQLQuery.batchOf(buildTemporaryJobTableInsertStatements(getSchema(db()), + loadStepInputs(), bucketRegion(),this)), db(), 0 ); } } @Override protected void onStateCheck() { try { - runReadQuerySync(buildProgressQuery(getSchema(db())), db(), 0, + runReadQuerySync(buildProgressQuery(getSchema(db()), this), db(), 0, rs -> { rs.next(); @@ -450,14 +397,14 @@ protected void onStateCheck() { getStatus().setEstimatedProgress(progress); - log("Progress[" + progress + "] => " - + " processedBytes:" + processedBytes + " ,finishedCnt:" + finishedCnt + " ,failedCnt:" + failedCnt); + infoLog(STEP_ON_STATE_CHECK,this,"Progress[" + progress + "] => " + " processedBytes:" + + processedBytes + " ,finishedCnt:" + finishedCnt + " ,failedCnt:" + failedCnt); return progress; }); } catch (Exception e) { //TODO: What to do? Only log? Reporting the status is not that important. Further, ignore the "table does not exist" error - report 0 in this case. - logger.error(e); + errorLog(STEP_ON_STATE_CHECK, this, e); } } @@ -465,20 +412,17 @@ protected void onStateCheck() { protected void onAsyncSuccess() throws WebClientException, SQLException, TooManyResourcesClaimed, IOException { try { - - logAndSetPhase(Phase.RETRIEVE_STATISTICS); FeatureStatistics statistics = runReadQuerySync(buildStatisticDataOfTemporaryTableQuery(), db(), 0, rs -> rs.next() ? 
new FeatureStatistics().withFeatureCount(rs.getLong("imported_rows")).withByteSize(rs.getLong("imported_bytes")) : new FeatureStatistics()); - log("Statistics: bytes=" + statistics.getByteSize() + " rows=" + statistics.getFeatureCount()); - logAndSetPhase(Phase.WRITE_STATISTICS); + infoLog(STEP_ON_ASYNC_SUCCESS, this,"Job Statistics: bytes=" + statistics.getByteSize() + " rows=" + statistics.getFeatureCount()); registerOutputs(List.of(statistics), true); cleanUpDbRelatedResources(); - logAndSetPhase(Phase.RELEASE_READONLY); + infoLog(STEP_ON_ASYNC_SUCCESS, this,"Release READONLY"); hubWebClient().patchSpace(getSpaceId(), Map.of( "readOnly", false, "contentUpdatedAt", Core.currentTimeMillis() @@ -489,7 +433,7 @@ protected void onAsyncSuccess() throws WebClientException, //relation "*_job_data" does not exist - can happen when we have received a SUCCESS_CALLBACK twice //TODO: Find out the cases in which that could happen and prevent it from happening if (e.getSQLState() != null && e.getSQLState().equals("42P01")) { - log("_job_data table got already deleted!"); + errorLog(STEP_ON_ASYNC_SUCCESS, this, e, "_job_data table was already deleted!"); return; } throw e; @@ -497,9 +441,11 @@ } private void cleanUpDbRelatedResources() throws TooManyResourcesClaimed, SQLException, WebClientException { - logAndSetPhase(Phase.DROP_TMP_TABLE); - runWriteQuerySync(buildDropTemporaryTableForImportQuery(), db(), 0); - runWriteQuerySync(buildDropTemporaryTriggerTableForImportQuery(), db(), 0); + infoLog(STEP_ON_ASYNC_SUCCESS, this, "Clean up database resources"); + runBatchWriteQuerySync(SQLQuery.batchOf( + buildTemporaryJobTableDropStatement(getSchema(db()), getTemporaryJobTableName(getId())), + buildTemporaryJobTableDropStatement(getSchema(db()), getTemporaryTriggerTableName(getId())) + ), db(), 0); } @Override @@ -523,79 +469,6 @@ public void resume() throws Exception { _execute(true); } - private SQLQuery buildTemporaryTableForImportQuery(String schema) { - return new SQLQuery(""" - CREATE TABLE IF NOT EXISTS ${schema}.${table} - ( - s3_bucket text NOT NULL, - s3_path text NOT NULL, - s3_region text NOT NULL, - state text NOT NULL, --jobtype - execution_count int DEFAULT 0, --amount of retries - data jsonb COMPRESSION lz4, --statistic data - i SERIAL, - CONSTRAINT ${primaryKey} PRIMARY KEY (s3_path) - ); - """) - .withVariable("table", getTemporaryJobTableName(this)) - .withVariable("schema", schema) - .withVariable("primaryKey", getTemporaryJobTableName(this) + "_primKey"); - } - - private void fillTemporaryTableWithInputs(Database db, List inputs, String bucketRegion) - throws SQLException, TooManyResourcesClaimed { - List queryList = new ArrayList<>(); - for (S3DataFile input : inputs) { - if (input instanceof UploadUrl uploadUrl) { - JsonObject data = new JsonObject() - .put("compressed", uploadUrl.isCompressed()) - .put("filesize", uploadUrl.getByteSize()); - - queryList.add( - new SQLQuery(""" - INSERT INTO ${schema}.${table} (s3_bucket, s3_path, s3_region, state, data) - VALUES (#{bucketName}, #{s3Key}, #{bucketRegion}, #{state}, #{data}::jsonb) - ON CONFLICT (s3_path) DO NOTHING; - """) //TODO: Why would we ever have a conflict here? Why to fill the table again on resume()? 
- .withVariable("schema", getSchema(db)) - .withVariable("table", getTemporaryJobTableName(this)) - .withNamedParameter("s3Key", input.getS3Key()) - .withNamedParameter("bucketName", input.getS3Bucket()) - .withNamedParameter("bucketRegion", bucketRegion) - .withNamedParameter("state", "SUBMITTED") - .withNamedParameter("data", data.toString()) - ); - } - } - //Add final entry - queryList.add( - new SQLQuery(""" - INSERT INTO ${schema}.${table} (s3_bucket, s3_path, s3_region, state, data) - VALUES (#{bucketName}, #{s3Key}, #{bucketRegion}, #{state}, #{data}::jsonb) - ON CONFLICT (s3_path) DO NOTHING; - """) //TODO: Why would we ever have a conflict here? Why to fill the table again on resume()? - .withVariable("schema", getSchema(db)) - .withVariable("table", getTemporaryJobTableName(this)) - .withNamedParameter("s3Key", "SUCCESS_MARKER") - .withNamedParameter("bucketName", "SUCCESS_MARKER") - .withNamedParameter("state", "SUCCESS_MARKER") - .withNamedParameter("bucketRegion", "SUCCESS_MARKER") - .withNamedParameter("data", "{}")); - runBatchWriteQuerySync(SQLQuery.batchOf(queryList), db, 0); - } - - private SQLQuery buildDropTemporaryTableForImportQuery() throws WebClientException { - return new SQLQuery("DROP TABLE IF EXISTS ${schema}.${table};") - .withVariable("table", getTemporaryJobTableName(this)) - .withVariable("schema", getSchema(db())); - } - - private SQLQuery buildDropTemporaryTriggerTableForImportQuery() throws WebClientException { - return new SQLQuery("DROP TABLE IF EXISTS ${schema}.${table};") - .withVariable("table", TransportTools.getTemporaryTriggerTableName(this)) - .withVariable("schema", getSchema(db())); - } - private SQLQuery buildTemporaryTriggerTableForImportQuery() throws WebClientException { String tableFields = "jsondata TEXT, " @@ -604,7 +477,7 @@ private SQLQuery buildTemporaryTriggerTableForImportQuery() throws WebClientExce return new SQLQuery("CREATE TABLE IF NOT EXISTS ${schema}.${table} (${{tableFields}} )") .withQueryFragment("tableFields", tableFields) .withVariable("schema", getSchema(db())) - .withVariable("table", TransportTools.getTemporaryTriggerTableName(this)); + .withVariable("table", TransportTools.getTemporaryTriggerTableName(getId())); } private SQLQuery buildCreateImportTrigger(String targetAuthor, long newVersion) throws WebClientException { @@ -621,21 +494,21 @@ private SQLQuery buildTemporaryTriggerTableBlock(String targetAuthor, long newVe } private SQLQuery buildCreateImportTriggerForEmptyLayer(String targetAuthor, long targetSpaceVersion) throws WebClientException { - String triggerFunction = "xyz_import_trigger_for_empty_layer"; + String triggerFunction = "import_from_s3_trigger_for_empty_layer"; triggerFunction += entityPerLine == FeatureCollection ? 
"_geojsonfc" : ""; return new SQLQuery("CREATE OR REPLACE TRIGGER insertTrigger BEFORE INSERT ON ${schema}.${table} " - + "FOR EACH ROW EXECUTE PROCEDURE ${schema}.${triggerFunction}('${{author}}', ${{spaceVersion}}, '${{targetTable}}');") + + "FOR EACH ROW EXECUTE PROCEDURE ${triggerFunction}('${{author}}', ${{spaceVersion}}, '${{targetTable}}');") .withQueryFragment("spaceVersion", "" + targetSpaceVersion) .withQueryFragment("author", targetAuthor) - .withQueryFragment("targetTable", getRootTableName(space)) + .withQueryFragment("targetTable", getRootTableName(space())) .withVariable("triggerFunction", triggerFunction) .withVariable("schema", getSchema(db())) - .withVariable("table", TransportTools.getTemporaryTriggerTableName(this)); + .withVariable("table", TransportTools.getTemporaryTriggerTableName(getId())); } private SQLQuery buildCreateImportTriggerForNonEmptyLayer(String author, long newVersion) throws WebClientException { - String triggerFunction = "xyz_import_trigger_for_non_empty_layer"; + String triggerFunction = "import_from_s3_trigger_for_non_empty_layer"; String superTable = space().getExtension() != null ? getRootTableName(superSpace()) : null; //TODO: Check if we can forward the whole transaction to the FeatureWriter rather than doing it for each row @@ -670,10 +543,10 @@ private SQLQuery buildCreateImportTriggerForNonEmptyLayer(String author, long ne .withQueryFragment("extendedTable", superTable == null ? "NULL" : "'" + superTable + "'") .withQueryFragment("format", format.toString()) .withQueryFragment("entityPerLine", entityPerLine.toString()) - .withQueryFragment("targetTable", getRootTableName(space)) + .withQueryFragment("targetTable", getRootTableName(space())) .withVariable("schema", getSchema(db())) .withVariable("triggerFunction", triggerFunction) - .withVariable("table", TransportTools.getTemporaryTriggerTableName(this)); + .withVariable("table", TransportTools.getTemporaryTriggerTableName(getId())); } //TODO: Move to XyzSpaceTableHelper or so (it's the nth time we have that implemented somewhere) @@ -692,74 +565,27 @@ SELECT sum((data->'filesize')::bigint) as imported_bytes, WHERE POSITION('SUCCESS_MARKER' in state) = 0; """) .withVariable("schema", getSchema(db())) - .withVariable("tmpTable", getTemporaryJobTableName(this)) - .withVariable("triggerTable", TransportTools.getTemporaryTriggerTableName(this)); - } - - private SQLQuery buildProgressQuery(String schema) { - return new SQLQuery(""" - SELECT - COALESCE(processed_bytes/overall_bytes, 0) as progress, - COALESCE(processed_bytes,0) as processed_bytes, - COALESCE(finished_cnt,0) as finished_cnt, - COALESCE(failed_cnt,0) as failed_cnt - FROM( - SELECT - (SELECT sum((data->'filesize')::bigint ) FROM ${schema}.${table}) as overall_bytes, - sum((data->'filesize')::bigint ) as processed_bytes, - sum((state = 'FINISHED')::int) as finished_cnt, - sum((state = 'FAILED')::int) as failed_cnt - FROM ${schema}.${table} - WHERE POSITION('SUCCESS_MARKER' in state) = 0 - AND state IN ('FINISHED','FAILED') - )A - """) - .withVariable("schema", schema) - .withVariable("table", getTemporaryJobTableName(this)); + .withVariable("tmpTable", getTemporaryJobTableName(getId())) + .withVariable("triggerTable", TransportTools.getTemporaryTriggerTableName(getId())); } private SQLQuery buildImportQuery() throws WebClientException { - String schema = getSchema(db()); SQLQuery successQuery = buildSuccessCallbackQuery(); SQLQuery failureQuery = buildFailureCallbackQuery(); - return new SQLQuery( - "CALL xyz_import_start(#{schema}, 
#{temporary_tbl}::regclass, #{target_tbl}::regclass, #{format}, '${{successQuery}}', '${{failureQuery}}');") - .withAsyncProcedure(true) - .withNamedParameter("schema", schema) - .withNamedParameter("target_tbl", schema + ".\"" + TransportTools.getTemporaryTriggerTableName(this) + "\"") - .withNamedParameter("temporary_tbl", schema + ".\"" + (getTemporaryJobTableName(this)) + "\"") - .withNamedParameter("format", format.toString()) - .withQueryFragment("successQuery", successQuery.substitute().text().replaceAll("'", "''")) - .withQueryFragment("failureQuery", failureQuery.substitute().text().replaceAll("'", "''")) - .withContext(getQueryContext()); - } - private SQLQuery buildImportQueryBlock() throws WebClientException { - /** - * TODO: - * The idea was to uses context with asyncify. The integration in "_create_asyncify_query_block" (same - * principal as with xzy.password) has not worked. If we find a solution with asyncify we can use the block - * query - if not, we can simply use buildImportQuery() - */ - return new SQLQuery("${{importQuery}}") - .withAsyncProcedure(true) - .withQueryFragment("importQuery", buildImportQuery()); + return new SQLQuery( + "CALL execute_transfer(#{format}, '${{successQuery}}', '${{failureQuery}}');") + .withContext(getQueryContext()) + .withAsyncProcedure(true) + .withNamedParameter("format", format.toString()) + .withQueryFragment("successQuery", successQuery.substitute().text().replaceAll("'", "''")) + .withQueryFragment("failureQuery", failureQuery.substitute().text().replaceAll("'", "''")); } private Map getQueryContext() throws WebClientException { String superTable = space().getExtension() != null ? getRootTableName(superSpace()) : null; - - final Map queryContext = new HashMap<>(Map.of( - "schema", getSchema(db()), - "table", getRootTableName(space()), - "context", superTable != null ? "'DEFAULT'" : "NULL", - "historyEnabled", (space().getVersionsToKeep() > 1) - )); - - if (superTable != null) - queryContext.put("extendedTable", superTable); - return queryContext; + return createQueryContext(getId(), getSchema(db()), getRootTableName(space()), (space().getVersionsToKeep() > 1), superTable); } private SQLQuery buildFeatureWriterQuery(String featureList, long targetVersion) throws WebClientException { @@ -776,7 +602,7 @@ SELECT write_features( #{returnResult} );""") .withNamedParameter("featureList", featureList) - .withNamedParameter("author", space.getOwner()) + .withNamedParameter("author", space().getOwner()) .withNamedParameter("onExists", updateStrategy.onExists() == null ? null : updateStrategy.onExists().toString()) .withNamedParameter("onNotExists", updateStrategy.onNotExists() == null ? 
null : updateStrategy.onNotExists().toString()) .withNamedParameter("onVersionConflict", @@ -791,19 +617,7 @@ SELECT write_features( return writeFeaturesQuery; } - private SQLQuery resetSuccessMarkerAndRunningOnes(String schema) { - return new SQLQuery(""" - UPDATE ${schema}.${table} - SET state = - CASE - WHEN state = 'SUCCESS_MARKER_RUNNING' THEN 'SUCCESS_MARKER' - WHEN state = 'RUNNING' THEN 'SUBMITTED' - END - WHERE state IN ('SUCCESS_MARKER_RUNNING', 'RUNNING'); - """) - .withVariable("schema", schema) - .withVariable("table", getTemporaryJobTableName(this)); - } + private double calculateNeededAcus(int threadCount) { double neededACUs; @@ -817,8 +631,8 @@ private double calculateNeededAcus(int threadCount) { neededACUs = ResourceAndTimeCalculator.getInstance().calculateNeededImportAcus( getUncompressedUploadBytesEstimation(), fileCount, threadCount); - logAndSetPhase(Phase.CALCULATE_ACUS, - "expectedMemoryConsumption: " + getUncompressedUploadBytesEstimation() + " => neededACUs:" + neededACUs); + infoLog(JOB_EXECUTOR, this, "Calculated ACUS: expectedMemoryConsumption: " + + getUncompressedUploadBytesEstimation() + " => neededACUs:" + neededACUs); return neededACUs; } @@ -833,21 +647,4 @@ public enum Format { CSV_JSON_WKB, GEOJSON; } - - public enum Phase { - VALIDATE, - CALCULATE_ACUS, - SET_READONLY, - RETRIEVE_NEW_VERSION, - CREATE_TRIGGER, - CREATE_TMP_TABLE, - RESET_SUCCESS_MARKER, - FILL_TMP_TABLE, - EXECUTE_IMPORT, - RETRIEVE_STATISTICS, - WRITE_STATISTICS, - DROP_TRIGGER, - DROP_TMP_TABLE, - RELEASE_READONLY; - } } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/TransportTools.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/TransportTools.java index 09231ed5c0..6df3c1617d 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/TransportTools.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/TransportTools.java @@ -19,18 +19,179 @@ package com.here.xyz.jobs.steps.impl.transport; +import com.here.xyz.jobs.steps.S3DataFile; import com.here.xyz.jobs.steps.Step; +import com.here.xyz.jobs.steps.impl.SpaceBasedStep; +import com.here.xyz.jobs.steps.inputs.UploadUrl; +import com.here.xyz.jobs.steps.outputs.DownloadUrl; +import com.here.xyz.util.db.SQLQuery; +import io.vertx.core.json.JsonObject; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class TransportTools { + private static final Logger logger = LogManager.getLogger(); private static final String JOB_DATA_PREFIX = "job_data_"; private static final String TRIGGER_TABLE_SUFFIX = "_trigger_tbl"; - protected static String getTemporaryJobTableName(Step step) { - return JOB_DATA_PREFIX + step.getId(); + protected static String getSpaceId(Step step) { + if(step instanceof SpaceBasedStep spaceStep) + return spaceStep.getSpaceId(); + return null; + } + + public static String getTemporaryJobTableName(String stepId) { + return JOB_DATA_PREFIX + stepId; + } + + public static String getTemporaryTriggerTableName(String stepId) { + return getTemporaryJobTableName(stepId) + TRIGGER_TABLE_SUFFIX; + } + + protected static SQLQuery buildTemporaryJobTableDropStatement(String schema, String tableName) { + return new SQLQuery("DROP TABLE IF EXISTS ${schema}.${table};") + .withVariable("table", tableName) + .withVariable("schema", schema); + } + + protected 
+  protected static SQLQuery buildTemporaryJobTableCreateStatement(String schema, Step step) {
+    return new SQLQuery("""
+        CREATE TABLE IF NOT EXISTS ${schema}.${table}
+        (
+          s3_bucket text NOT NULL,
+          s3_path text NOT NULL,
+          s3_region text NOT NULL,
+          state text NOT NULL, --processing state of the file
+          execution_count int DEFAULT 0, --number of retries
+          data jsonb COMPRESSION lz4, --statistics data
+          i SERIAL,
+          CONSTRAINT ${primaryKey} PRIMARY KEY (s3_path)
+        );
+        """)
+        .withVariable("table", getTemporaryJobTableName(step.getId()))
+        .withVariable("schema", schema)
+        .withVariable("primaryKey", getTemporaryJobTableName(step.getId()) + "_primKey");
+  }
+
+  protected static List<SQLQuery> buildTemporaryJobTableInsertStatements(String schema, List<S3DataFile> fileList,
+      String bucketRegion, Step step) {
+    List<SQLQuery> queryList = new ArrayList<>();
+    for (S3DataFile input : fileList) {
+      if (input instanceof UploadUrl || input instanceof DownloadUrl) {
+        JsonObject data = new JsonObject()
+            .put("compressed", input.isCompressed())
+            .put("filesize", input.getByteSize());
+
+        queryList.add(
+            new SQLQuery("""
+                INSERT INTO ${schema}.${table} (s3_bucket, s3_path, s3_region, state, data)
+                    VALUES (#{bucketName}, #{s3Key}, #{bucketRegion}, #{state}, #{data}::jsonb)
+                    ON CONFLICT (s3_path) DO NOTHING;
+                """) //TODO: Why would we ever have a conflict here? Why fill the table again on resume()?
+                .withVariable("schema", schema)
+                .withVariable("table", getTemporaryJobTableName(step.getId()))
+                .withNamedParameter("s3Key", input.getS3Key())
+                .withNamedParameter("bucketName", input.getS3Bucket())
+                .withNamedParameter("bucketRegion", bucketRegion)
+                .withNamedParameter("state", "SUBMITTED")
+                .withNamedParameter("data", data.toString())
+        );
+      }
+    }
+    //Add final entry
+    queryList.add(
+        new SQLQuery("""
+            INSERT INTO ${schema}.${table} (s3_bucket, s3_path, s3_region, state, data)
+                VALUES (#{bucketName}, #{s3Key}, #{bucketRegion}, #{state}, #{data}::jsonb)
+                ON CONFLICT (s3_path) DO NOTHING;
+            """) //TODO: Why would we ever have a conflict here? Why fill the table again on resume()?
+            .withVariable("schema", schema)
+            .withVariable("table", getTemporaryJobTableName(step.getId()))
+            .withNamedParameter("s3Key", "SUCCESS_MARKER")
+            .withNamedParameter("bucketName", "SUCCESS_MARKER")
+            .withNamedParameter("state", "SUCCESS_MARKER")
+            .withNamedParameter("bucketRegion", "SUCCESS_MARKER")
+            .withNamedParameter("data", "{}"));
+    return queryList;
+  }
+
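/* [Editor's note: inferred from the queries below, not part of this change]
   Lifecycle of the rows created above:
     SUBMITTED -> RUNNING -> FINISHED | FAILED            (one row per input file)
     SUCCESS_MARKER <-> SUCCESS_MARKER_RUNNING            (single artificial terminator row)
   The reset statement below rolls every *_RUNNING state back to its idle counterpart, so a
   resumed step can safely re-process files that were in flight when the step was interrupted. */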
+ .withVariable("schema", schema) + .withVariable("table", getTemporaryJobTableName(step.getId())) + .withNamedParameter("s3Key", "SUCCESS_MARKER") + .withNamedParameter("bucketName", "SUCCESS_MARKER") + .withNamedParameter("state", "SUCCESS_MARKER") + .withNamedParameter("bucketRegion", "SUCCESS_MARKER") + .withNamedParameter("data", "{}")); + return queryList; + } + + protected static SQLQuery buildResetSuccessMarkerAndRunningOnesStatement(String schema, Step step) { + return new SQLQuery(""" + UPDATE ${schema}.${table} + SET state = + CASE + WHEN state = 'SUCCESS_MARKER_RUNNING' THEN 'SUCCESS_MARKER' + WHEN state = 'RUNNING' THEN 'SUBMITTED' + END + WHERE state IN ('SUCCESS_MARKER_RUNNING', 'RUNNING'); + """) + .withVariable("schema", schema) + .withVariable("table", getTemporaryJobTableName(step.getId())); + } + + protected static SQLQuery buildProgressQuery(String schema, Step step) { + return new SQLQuery(""" + SELECT + COALESCE(processed_bytes/overall_bytes, 0) as progress, + COALESCE(processed_bytes,0) as processed_bytes, + COALESCE(finished_cnt,0) as finished_cnt, + COALESCE(failed_cnt,0) as failed_cnt + FROM( + SELECT + (SELECT sum((data->'filesize')::bigint ) FROM ${schema}.${table}) as overall_bytes, + sum((data->'filesize')::bigint ) as processed_bytes, + sum((state = 'FINISHED')::int) as finished_cnt, + sum((state = 'FAILED')::int) as failed_cnt + FROM ${schema}.${table} + WHERE POSITION('SUCCESS_MARKER' in state) = 0 + AND state IN ('FINISHED','FAILED') + )A + """) + .withVariable("schema", schema) + .withVariable("table", getTemporaryJobTableName(step.getId())); + } + + protected static Map createQueryContext(String stepId, String schema, String table, + boolean historyEnabled, String superTable){ + + final Map queryContext = new HashMap<>(Map.of( + "stepId", stepId, + "schema", schema, + "table", table, + "context", superTable != null ? "'DEFAULT'" : "NULL", + "historyEnabled", historyEnabled + )); + + if (superTable != null) + queryContext.put("extendedTable", superTable); + + return queryContext; + } + + protected static void infoLog(Phase phase, Step step, String... messages) { + logger.info("{} [{}@{}] ON '{}' {}", step.getClass().getSimpleName(), step.getGlobalStepId(), phase.name(), getSpaceId(step), messages.length > 0 ? messages : ""); + } + + protected static void errorLog(Phase phase, Step step, Exception e, String... 
message) { + logger.error("{} [{}@{}] ON '{}' {}", step.getClass().getSimpleName(), step.getGlobalStepId(), phase.name(), getSpaceId(step), message, e); } - protected static String getTemporaryTriggerTableName(Step step) { - return getTemporaryJobTableName(step)+TRIGGER_TABLE_SUFFIX; + protected enum Phase { + GRAPH_TRANSFORMER, + JOB_EXECUTOR, + STEP_EXECUTE, + STEP_RESUME, + STEP_CANCEL, + STEP_ON_STATE_CHECK, + STEP_ON_ASYNC_FAILURE, + STEP_ON_ASYNC_SUCCESS, + JOB_DELETE, + JOB_VALIDATE } } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesQuickValidator.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/tools/ImportFilesQuickValidator.java similarity index 97% rename from xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesQuickValidator.java rename to xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/tools/ImportFilesQuickValidator.java index a586b87d27..e7a2595c10 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/ImportFilesQuickValidator.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/impl/transport/tools/ImportFilesQuickValidator.java @@ -17,7 +17,7 @@ * License-Filename: LICENSE */ -package com.here.xyz.jobs.steps.impl.transport; +package com.here.xyz.jobs.steps.impl.transport.tools; import static com.here.xyz.XyzSerializable.Mappers.DEFAULT_MAPPER; @@ -44,7 +44,7 @@ public class ImportFilesQuickValidator { private static final int VALIDATE_LINE_PAGE_SIZE_BYTES = 512 * 1024; private static final int VALIDATE_LINE_MAX_LINE_SIZE_BYTES = 4 * 1024 * 1024; - static void validate(S3DataFile s3File, Format format, EntityPerLine entityPerLine) throws ValidationException { + public static void validate(S3DataFile s3File, Format format, EntityPerLine entityPerLine) throws ValidationException { try { validateFirstCSVLine(s3File, format, "", 0, entityPerLine); } diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/inputs/Input.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/inputs/Input.java index 1b6afeab20..749bd9ec50 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/inputs/Input.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/inputs/Input.java @@ -145,7 +145,7 @@ private static List loadInputsAndWriteMetadata(String jobId return inputs; } - static final InputsMetadata loadMetadata(String jobId) throws IOException { + static final InputsMetadata loadMetadata(String jobId) throws IOException, AmazonS3Exception { InputsMetadata metadata = metadataCache.get(jobId); if (metadata != null) return metadata; @@ -182,7 +182,7 @@ static final void storeMetadata(String jobId, List inputs, String referen Map metadata = inputs.stream() .collect(Collectors.toMap(input -> (input.s3Bucket == null ? 
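/* [Editor's note: observation only, not part of this change]
   The map key built below is the bare S3 key for inputs in the default bucket and a fully
   qualified URI otherwise, e.g. (hypothetical names) "part-0.csv" vs.
   "s3://other-bucket/part-0.csv", keeping inputs from foreign buckets distinguishable in the
   persisted InputsMetadata. */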
"" : "s3://" + input.s3Bucket + "/") + input.s3Key, input -> new InputMetadata(input.byteSize, input.compressed))); - storeMetadata(jobId, new InputsMetadata(metadata, Set.of(jobId), referencedJobId)); + storeMetadata(jobId, new InputsMetadata(metadata, new HashSet<>(Set.of(jobId)), referencedJobId)); } static final List loadInputsInParallel(String bucketName, String inputS3Prefix) { @@ -249,7 +249,7 @@ private static void deleteInputs(String owningJobId, String referencingJob) { metadata = loadMetadata(owningJobId); metadata.referencingJobs().remove(referencingJob); } - catch (IOException ignore) {} + catch (AmazonS3Exception | IOException ignore) {} //Only delete the inputs if no other job is referencing them anymore if (metadata == null || metadata.referencingJobs().isEmpty()) { diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/DownloadUrl.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/DownloadUrl.java index 327ab7e047..84bf8efdaf 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/DownloadUrl.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/DownloadUrl.java @@ -19,22 +19,26 @@ package com.here.xyz.jobs.steps.outputs; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonView; import com.here.xyz.jobs.steps.Config; import com.here.xyz.jobs.steps.S3DataFile; import com.here.xyz.jobs.util.S3Client; +import java.io.IOException; import java.net.URL; public class DownloadUrl extends Output implements S3DataFile { @JsonView(Public.class) private long byteSize; + @JsonIgnore + private byte[] content; + private String contentType = "application/octet-stream"; @Override - public void store(String s3Key) { - /* - NOTE: Nothing to do here for now, later (for some step implementations it could be usable if we implement S3 upload logic for binaries here) - However, for now all step implementations care about uploading binaries to S3 by themselves (e.g. EMR, DB related steps) - */ + public void store(String s3Key) throws IOException { + if (content == null) + throw new IllegalStateException("No content was provided for the output to be stored."); + S3Client.getInstance().putObject(s3Key, contentType, content); } @JsonView(Public.class) @@ -51,6 +55,24 @@ public long getByteSize() { return byteSize; } + public void setContent(byte[] content) { + this.content = content; + } + + public DownloadUrl withContent(byte[] content) { + setContent(content); + return this; + } + + public void setContentType(String contentType) { + this.contentType = contentType; + } + + public DownloadUrl withContentType(String contentType) { + setContentType(contentType); + return this; + } + @Override public String getS3Bucket() { //Current outputs are written to default bucket only diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/FileStatistics.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/FileStatistics.java new file mode 100644 index 0000000000..19081d5aa6 --- /dev/null +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/FileStatistics.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2017-2024 HERE Europe B.V. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * License-Filename: LICENSE + */ + +package com.here.xyz.jobs.steps.outputs; + +public class FileStatistics extends ModelBasedOutput { + private long exportedFeatures; + private long exportedBytes; + private int exportedFiles; + + public long getExportedFeatures() { + return exportedFeatures; + } + + public void setExportedFeatures(long exportedFeatures) { + this.exportedFeatures = exportedFeatures; + } + + public FileStatistics withRowsExported(long rowsExported) { + setExportedFeatures(rowsExported); + return this; + } + + public long getExportedBytes() { + return exportedBytes; + } + + public void setExportedBytes(long exportedBytes) { + this.exportedBytes = exportedBytes; + } + + public FileStatistics withBytesExported(long bytesExported) { + setExportedBytes(bytesExported); + return this; + } + + public int getExportedFiles() { + return exportedFiles; + } + + public void setExportedFiles(int exportedFiles) { + this.exportedFiles = exportedFiles; + } + + public FileStatistics withFilesCreated(int filesCreated) { + setExportedFiles(filesCreated); + return this; + } +} diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/ModelBasedOutput.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/ModelBasedOutput.java index 17309827db..d7d58d30fc 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/ModelBasedOutput.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/steps/outputs/ModelBasedOutput.java @@ -28,7 +28,8 @@ @JsonTypeInfo(use = Id.NAME, property = "type") @JsonSubTypes({ - @JsonSubTypes.Type(value = FeatureStatistics.class, name = "FeatureStatistics") + @JsonSubTypes.Type(value = FeatureStatistics.class, name = "FeatureStatistics"), + @JsonSubTypes.Type(value = FileStatistics.class, name = "FileStatistics") }) public abstract class ModelBasedOutput extends Output { @Override diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/JobWebClient.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/JobWebClient.java index c325180df3..9b0516abec 100644 --- a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/JobWebClient.java +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/JobWebClient.java @@ -21,11 +21,14 @@ import static com.google.common.net.HttpHeaders.CONTENT_TYPE; import static com.google.common.net.MediaType.JSON_UTF_8; +import static com.here.xyz.XyzSerializable.deserialize; import static java.time.temporal.ChronoUnit.SECONDS; +import com.fasterxml.jackson.core.JsonProcessingException; import com.here.xyz.XyzSerializable; import com.here.xyz.jobs.steps.Config; import com.here.xyz.jobs.steps.Step; +import com.here.xyz.models.hub.Space; import com.here.xyz.util.web.XyzWebClient; import java.net.http.HttpRequest; import java.net.http.HttpRequest.BodyPublishers; diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/test/ContentCreator.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/test/ContentCreator.java new file mode 100644 index 
0000000000..81bb97d844 --- /dev/null +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/test/ContentCreator.java @@ -0,0 +1,62 @@ +package com.here.xyz.jobs.util.test; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.here.xyz.XyzSerializable; +import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace; +import com.here.xyz.models.geojson.coordinates.PointCoordinates; +import com.here.xyz.models.geojson.implementation.Feature; +import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.models.geojson.implementation.Point; +import com.here.xyz.models.geojson.implementation.Properties; +import com.here.xyz.models.geojson.implementation.Geometry; +import org.locationtech.jts.io.ParseException; +import org.locationtech.jts.io.WKBReader; + +import java.util.Random; + +public class ContentCreator { + /** Generate content */ + public static byte[] generateImportFileContent(ImportFilesToSpace.Format format, int featureCnt) { + String output = ""; + + for (int i = 1; i <= featureCnt; i++) { + output += generateContentLine(format, i); + } + return output.getBytes(); + } + + public static String generateContentLine(ImportFilesToSpace.Format format, int i){ + Random rd = new Random(); + String lineSeparator = "\n"; + + if(format.equals(ImportFilesToSpace.Format.CSV_JSON_WKB)) + return "\"{'\"properties'\": {'\"test'\": "+i+"}}\",01010000A0E61000007DAD4B8DD0AF07C0BD19355F25B74A400000000000000000"+lineSeparator; + else if(format.equals(ImportFilesToSpace.Format.CSV_GEOJSON)) + return "\"{'\"type'\":'\"Feature'\",'\"geometry'\":{'\"type'\":'\"Point'\",'\"coordinates'\":["+(rd.nextInt(179))+"."+(rd.nextInt(100))+","+(rd.nextInt(79))+"."+(rd.nextInt(100))+"]},'\"properties'\":{'\"test'\":"+i+"}}\""+lineSeparator; + else + return "{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":["+(rd.nextInt(179))+"."+(rd.nextInt(100))+","+(rd.nextInt(79))+"."+(rd.nextInt(100))+"]},\"properties\":{\"test\":"+i+"}}"+lineSeparator; + } + + public static FeatureCollection generateRandomFeatureCollection(int featureCnt) { + FeatureCollection fc = new FeatureCollection(); + try { + for (int i = 0; i < featureCnt; i++) + fc.getFeatures().add(new Feature().withProperties(new Properties().with("test", i)) + .withGeometry(new Point().withCoordinates(new PointCoordinates(i, i % 90)))); + }catch (JsonProcessingException e){} + + return fc; + } + + public static Feature getFeatureFromCSVLine(String csvLine) throws JsonProcessingException { + return XyzSerializable.deserialize( csvLine.substring(1, csvLine.lastIndexOf(",") -1 ).replaceAll("'\"","\""), Feature.class); + } + + public static Geometry getWKBFromCsvLine(String csvLine) throws ParseException { + String geomAsWKB = csvLine.substring(csvLine.lastIndexOf(",") + 1 ); + byte[] aux = WKBReader.hexToBytes(geomAsWKB); + /** Try to read WKB */ + org.locationtech.jts.geom.Geometry read = new WKBReader().read(aux); + return Geometry.convertJTSGeometry(read); + } +} diff --git a/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/test/StepTestBase.java b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/test/StepTestBase.java new file mode 100644 index 0000000000..cd4ed4c40a --- /dev/null +++ b/xyz-jobs/xyz-job-steps/src/main/java/com/here/xyz/jobs/util/test/StepTestBase.java @@ -0,0 +1,360 @@ +/* + * Copyright (C) 2017-2024 HERE Europe B.V. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * License-Filename: LICENSE + */ + +package com.here.xyz.jobs.util.test; + +import static com.google.common.net.HttpHeaders.CONTENT_TYPE; +import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION; +import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.LambdaStepRequest.RequestType.SUCCESS_CALLBACK; +import static com.here.xyz.jobs.steps.inputs.Input.inputS3Prefix; +import static com.here.xyz.util.Random.randomAlpha; +import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.buildSpaceTableDropIndexQueries; +import static java.lang.Thread.sleep; +import static java.net.http.HttpClient.Redirect.NORMAL; + +import com.amazonaws.services.lambda.runtime.Context; +import com.google.common.io.ByteStreams; +import com.google.common.net.MediaType; +import com.here.xyz.XyzSerializable; +import com.here.xyz.events.ContextAwareEvent; +import com.here.xyz.jobs.steps.Config; +import com.here.xyz.jobs.steps.execution.LambdaBasedStep; +import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace; +import com.here.xyz.jobs.steps.impl.transport.TransportTools; +import com.here.xyz.jobs.steps.outputs.DownloadUrl; +import com.here.xyz.jobs.util.S3Client; +import com.here.xyz.models.geojson.implementation.Feature; +import com.here.xyz.models.geojson.implementation.FeatureCollection; +import com.here.xyz.models.hub.Space; +import com.here.xyz.models.hub.Tag; +import com.here.xyz.responses.StatisticsResponse; +import com.here.xyz.util.db.SQLQuery; +import com.here.xyz.util.db.datasource.DataSourceProvider; +import com.here.xyz.util.db.datasource.DatabaseSettings; +import com.here.xyz.util.db.datasource.PooledDataSources; +import com.here.xyz.util.service.aws.SimulatedContext; +import com.here.xyz.util.web.HubWebClient; +import com.here.xyz.util.web.XyzWebClient; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.zip.GZIPInputStream; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; +import software.amazon.awssdk.core.SdkBytes; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.lambda.LambdaClient; +import software.amazon.awssdk.services.lambda.model.InvokeRequest; + +public class StepTestBase { + private static final Logger 
logger = LogManager.getLogger(); + protected String SPACE_ID = getClass().getSimpleName() + "_" + randomAlpha(5); + protected String JOB_ID = getClass().getSimpleName() + "_" + randomAlpha(5); + + protected static final String LAMBDA_ARN = "arn:aws:lambda:us-east-1:000000000000:function:job-step"; + private static final HubWebClient hubWebClient; + private static final S3Client s3Client; + private static LambdaClient lambdaClient; + private static final String PG_HOST = "localhost"; + private static final String PG_DB = "postgres"; + private static final String PG_USER = "postgres"; + private static final String PG_PW = "password"; + private static final String SCHEMA = "public"; + public static final Config config = new Config(); + + static { + try { + + Config.instance.JOBS_S3_BUCKET = "test-bucket"; + Config.instance.AWS_REGION = "us-east-1"; + Config.instance.ECPS_PHRASE = "local"; + Config.instance.HUB_ENDPOINT = "http://localhost:8080/hub"; + Config.instance.LOCALSTACK_ENDPOINT = new URI("http://localhost:4566"); + Config.instance.JOB_API_ENDPOINT = new URL("http://localhost:7070"); + hubWebClient = HubWebClient.getInstance("http://localhost:8080/hub"); + s3Client = S3Client.getInstance(); + lambdaClient = LambdaClient.builder() + .region(Region.of(Config.instance.AWS_REGION)) + .credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create("localstack", "localstack"))) + .endpointOverride(Config.instance.LOCALSTACK_ENDPOINT) + .build(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public enum S3ContentType { + APPLICATION_JSON("application/json"), + TEXT_CSV("text/csv"); + + private final String value; + S3ContentType(String value) { this.value = value; } + } + + private DataSourceProvider getDataSourceProvider() { + return new PooledDataSources( + new DatabaseSettings("testSteps") + .withApplicationName(StepTestBase.class.getSimpleName()) + .withHost(PG_HOST) + .withDb(PG_DB) + .withUser(PG_USER) + .withPassword(PG_PW) + .withDbMaxPoolSize(2)); + } + + protected Space createSpace(String spaceId){ + return createSpace(new Space().withId(spaceId), false); + } + + protected Space createSpace(Space space, boolean force) { + String title = "test space for jobs"; + try { + space.setTitle(title); + return hubWebClient.createSpace(space); + } + catch (XyzWebClient.ErrorResponseException e) { + if (e.getErrorResponse().statusCode() == 409) { + deleteSpace(space.getId()); + return createSpace(space, false); + } + else { + System.out.println("Hub Error: " + e.getMessage()); + } + } catch (XyzWebClient.WebClientException e) { + System.out.println("Hub Error: " + e.getMessage()); + } + return null; + } + + protected void createTag(String spaceId, Tag tag) { + try { + hubWebClient.postTag(spaceId, tag); + } catch (XyzWebClient.WebClientException e) { + System.out.println("Hub Error: " + e.getMessage()); + } + } + + protected void deleteSpace(String spaceId) { + try { + hubWebClient.deleteSpace(spaceId); + } catch (XyzWebClient.WebClientException e) { + System.out.println("Hub Error: " + e.getMessage()); + } + } + + protected StatisticsResponse getStatistics(String spaceId) { + try { + return hubWebClient.loadSpaceStatistics(spaceId, ContextAwareEvent.SpaceContext.EXTENSION); + } catch (XyzWebClient.WebClientException e) { + System.out.println("Hub Error: " + e.getMessage()); + } + return null; + } + + protected FeatureCollection getFeaturesFromSmallSpace(String spaceId, String propertyFilter, boolean force2D) { + try { + return 
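/* [Editor's note: observation only, not part of this change]
   The Hub helpers in this class all follow the same pattern: WebClientExceptions are swallowed
   and printed, and the method falls through to "return null" (or a no-op). A broken local
   Hub/localstack setup therefore does not fail here but surfaces later as an assertion error
   or NullPointerException inside the actual test. */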
+  protected FeatureCollection getFeaturesFromSmallSpace(String spaceId, String propertyFilter, boolean force2D) {
+    try {
+      return hubWebClient.getFeaturesFromSmallSpace(spaceId, ContextAwareEvent.SpaceContext.EXTENSION, propertyFilter, force2D);
+    } catch (XyzWebClient.WebClientException e) {
+      System.out.println("Hub Error: " + e.getMessage());
+    }
+    return null;
+  }
+
+  protected FeatureCollection customReadFeaturesQuery(String spaceId, String customPath) {
+    try {
+      return hubWebClient.customReadFeaturesQuery(spaceId, customPath);
+    } catch (XyzWebClient.WebClientException e) {
+      System.out.println("Hub Error: " + e.getMessage());
+    }
+    return null;
+  }
+
+  protected void putFeatureCollectionToSpace(String spaceId, FeatureCollection fc) {
+    try {
+      hubWebClient.putFeaturesWithoutResponse(spaceId, fc);
+    } catch (XyzWebClient.WebClientException e) {
+      System.out.println("Hub Error: " + e.getMessage());
+    }
+  }
+
+  protected void putRandomFeatureCollectionToSpace(String spaceId, int featureCount) {
+    try {
+      hubWebClient.putFeaturesWithoutResponse(spaceId, ContentCreator.generateRandomFeatureCollection(featureCount));
+    } catch (XyzWebClient.WebClientException e) {
+      System.out.println("Hub Error: " + e.getMessage());
+    }
+  }
+
+  protected List<String> listExistingIndexes(String spaceId) throws SQLException {
+    return new SQLQuery("SELECT * FROM xyz_index_list_all_available(#{schema}, #{table});")
+        .withNamedParameter("schema", SCHEMA)
+        .withNamedParameter("table", spaceId)
+        .run(getDataSourceProvider(), rs -> {
+          List<String> result = new ArrayList<>();
+          while (rs.next())
+            result.add(rs.getString(1));
+          return result;
+        });
+  }
+
+  protected void deleteAllExistingIndexes(String spaceId) throws SQLException {
+    List<String> existingIndexes = listExistingIndexes(spaceId);
+    List<SQLQuery> dropQueries = buildSpaceTableDropIndexQueries(SCHEMA, existingIndexes);
+    SQLQuery.join(dropQueries, ";").write(getDataSourceProvider());
+  }
+
+  protected void deleteAllJobTables(List<String> stepIds) throws SQLException {
+    List<SQLQuery> dropQueries = new ArrayList<>();
+    for (String stepId : stepIds) {
+      dropQueries.add(new SQLQuery("DROP TABLE IF EXISTS ${schema}.${table};")
+          .withVariable("schema", SCHEMA)
+          .withVariable("table", TransportTools.getTemporaryJobTableName(stepId))
+      );
+      dropQueries.add(
+          new SQLQuery("DROP TABLE IF EXISTS ${schema}.${table};")
+              .withVariable("schema", SCHEMA)
+              .withVariable("table", TransportTools.getTemporaryTriggerTableName(stepId))
+      );
+    }
+    SQLQuery.join(dropQueries, ";").write(getDataSourceProvider());
+  }
+
+  protected void uploadFileToS3(String s3Key, S3ContentType contentType, byte[] data, boolean gzip) throws IOException {
+    s3Client.putObject(s3Key, contentType.value, data, gzip);
+  }
+
+  protected void cleanS3Files(String s3Prefix) {
+    s3Client.deleteFolder(s3Prefix);
+  }
+
+  private void invokeLambda(String lambdaArn, byte[] payload) {
+    lambdaClient.invoke(InvokeRequest.builder()
+        .functionName(lambdaArn)
+        .payload(SdkBytes.fromByteArray(payload))
+        .build());
+  }
+
+  protected void sendLambdaStepRequestBlock(LambdaBasedStep step) throws IOException, InterruptedException {
+    sendLambdaStepRequest(step, START_EXECUTION, true);
+    sleep(500);
+    sendLambdaStepRequest(step, SUCCESS_CALLBACK, true);
+  }
+
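/* [Editor's note: usage sketch, not part of this change]
   sendLambdaStepRequest(...) below either invokes the job-step Lambda deployed in localstack
   (simulate == false) or feeds the identical serialized LambdaStepRequest payload into an
   in-process LambdaBasedStepExecutor (simulate == true), e.g.:
     sendLambdaStepRequest(step, START_EXECUTION, true);  //no AWS round trip
   which is what sendLambdaStepRequestBlock(...) above does. */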
+  protected void sendLambdaStepRequest(LambdaBasedStep step, LambdaBasedStep.LambdaStepRequest.RequestType requestType,
+      boolean simulate) throws IOException {
+    Map<String, Object> stepMap = step.toMap();
+    stepMap.put("taskToken.$", "test123");
+    stepMap.put("jobId", JOB_ID);
+    LambdaBasedStep enrichedStep = XyzSerializable.fromMap(stepMap, LambdaBasedStep.class);
+    LambdaBasedStep.LambdaStepRequest request = new LambdaBasedStep.LambdaStepRequest().withStep(enrichedStep).withType(requestType);
+
+    logger.info("sendLambdaStepRequest with job-id: {}", JOB_ID);
+
+    if (!simulate)
+      invokeLambda(LAMBDA_ARN, request.toByteArray());
+    else {
+      OutputStream os = new ByteArrayOutputStream();
+      Context ctx = new SimulatedContext("localLambda", null);
+      new LambdaBasedStep.LambdaBasedStepExecutor().handleRequest(new ByteArrayInputStream(request.toByteArray()), os, ctx);
+    }
+  }
+
+  //TODO: find a central place to avoid double implementation from JobPlayground
+  public void uploadFiles(String jobId, int uploadFileCount, int featureCountPerFile, ImportFilesToSpace.Format format)
+      throws IOException {
+    //Generate N files with M features each
+    for (int i = 0; i < uploadFileCount; i++)
+      uploadInputFile(jobId, ContentCreator.generateImportFileContent(format, featureCountPerFile));
+  }
+
+  private void uploadInputFile(String jobId, byte[] bytes) throws IOException {
+    uploadFileToS3(inputS3Prefix(jobId) + "/" + UUID.randomUUID(), S3ContentType.APPLICATION_JSON, bytes, false);
+  }
+
+  protected List<Feature> downloadFileAndSerializeFeatures(DownloadUrl output) throws IOException {
+    logger.info("Check file: {}", output.getS3Key());
+    List<Feature> features = new ArrayList<>();
+
+    InputStream dataStream = S3Client.getInstance().streamObjectContent(output.getS3Key());
+
+    if (output.isCompressed())
+      dataStream = new GZIPInputStream(dataStream);
+
+    try (BufferedReader reader = new BufferedReader(new InputStreamReader(dataStream))) {
+      String line;
+
+      while ((line = reader.readLine()) != null) {
+        features.add(XyzSerializable.deserialize(line, Feature.class));
+      }
+    }
+    return features;
+  }
+
+  protected List<String> downloadFileAsText(URL url, boolean isCompressed, MediaType mediaType) throws IOException, URISyntaxException, InterruptedException {
+    List<String> fileInLines = new ArrayList<>();
+
+    logger.info("Check file: {}", url);
+    InputStream dataStream;
+    HttpRequest request = HttpRequest.newBuilder()
+        .uri(url.toURI())
+        .header(CONTENT_TYPE, mediaType.toString())
+        .method("GET", HttpRequest.BodyPublishers.noBody())
+        .version(HttpClient.Version.HTTP_1_1)
+        .build();
+
+    HttpClient client = HttpClient.newBuilder().followRedirects(NORMAL).build();
+    HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream());
+    if (response.statusCode() >= 400)
+      throw new RuntimeException("Received error response!");
+
+    dataStream = response.body();
+
+    if (isCompressed)
+      dataStream = new GZIPInputStream(dataStream);
+
+    try (BufferedReader reader = new BufferedReader(new InputStreamReader(dataStream))) {
+      String line;
+
+      while ((line = reader.readLine()) != null) {
+        fileInLines.add(line);
+      }
+    }
+    return fileInLines;
+  }
+
+  protected FeatureCollection readTestFeatureCollection(String filePath) throws IOException {
+    return XyzSerializable.deserialize(new String(ByteStreams.toByteArray(this.getClass().getResourceAsStream(filePath))).trim(), FeatureCollection.class);
+  }
+}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/TestSteps.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/TestSteps.java
deleted file mode 100644
index 93a0063e61..0000000000
--- a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/TestSteps.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (C) 2017-2024 HERE Europe B.V.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * License-Filename: LICENSE
- */
-
-package com.here.xyz.jobs.steps;
-
-import com.here.xyz.events.ContextAwareEvent;
-import com.here.xyz.jobs.util.S3Client;
-import com.here.xyz.responses.StatisticsResponse;
-import com.here.xyz.util.db.DatabaseSettings;
-import com.here.xyz.util.db.SQLQuery;
-import com.here.xyz.util.db.datasource.DataSourceProvider;
-import com.here.xyz.util.db.datasource.PooledDataSources;
-import com.here.xyz.util.web.HubWebClient;
-import com.here.xyz.util.web.XyzWebClient;
-import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
-import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
-import software.amazon.awssdk.core.SdkBytes;
-import software.amazon.awssdk.regions.Region;
-import software.amazon.awssdk.services.lambda.LambdaClient;
-import software.amazon.awssdk.services.lambda.model.InvokeRequest;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URL;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.buildSpaceTableDropIndexQueries;
-
-public class TestSteps {
-  private static final HubWebClient hubWebClient;
-  private static final S3Client s3Client;
-  private static LambdaClient lambdaClient;
-  private static final String PG_HOST = "localhost";
-  private static final String PG_DB = "postgres";
-  private static final String PG_USER = "postgres";
-  private static final String PG_PW = "password";
-  private static final String SCHEMA = "public";
-
-  static {
-    try {
-      new Config();
-      Config.instance.JOBS_S3_BUCKET = "test-bucket";
-      Config.instance.AWS_REGION = "us-east-1";
-      Config.instance.ECPS_PHRASE = "local";
-      Config.instance.HUB_ENDPOINT = "http://localhost:8080/hub";
-      Config.instance.LOCALSTACK_ENDPOINT = new URI("http://localhost:4566");
-      Config.instance.JOB_API_ENDPOINT = new URL("http://localhost:7070");
-      hubWebClient = HubWebClient.getInstance("http://localhost:8080/hub");
-      s3Client = S3Client.getInstance();
-      lambdaClient = LambdaClient.builder()
-          .region(Region.of(Config.instance.AWS_REGION))
-          .credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create("localstack", "localstack")))
-          .endpointOverride(Config.instance.LOCALSTACK_ENDPOINT)
-          .build();
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  public enum S3ContentType {
-    APPLICATION_JSON("application/json"),
-    TEXT_CSV("text/csv");
-
-    private final String value;
-    S3ContentType(String value) { this.value = value; }
-  }
-
-  private static DataSourceProvider getDataSourceProvider() {
-    return new PooledDataSources(new DatabaseSettings("testPSQL")
-        .withHost(PG_HOST)
-        .withDb(PG_DB)
-        .withUser(PG_USER)
-        .withPassword(PG_PW)
-        .withDbMaxPoolSize(2));
-  }
-
-  protected static void createSpace(String spaceId) {
-    try {
-      hubWebClient.createSpace(spaceId, "test space for jobs");
-    } catch (XyzWebClient.WebClientException e) {
-      System.out.println("Hub Error: " + e.getMessage());
-    }
-  }
-
-  protected static void deleteSpace(String spaceId) {
-    try {
-      hubWebClient.deleteSpace(spaceId);
-    } catch (XyzWebClient.WebClientException e) {
-      System.out.println("Hub Error: " + e.getMessage());
-    }
-  }
-
-  protected static StatisticsResponse getStatistics(String spaceId) {
-    try {
-      return hubWebClient.loadSpaceStatistics(spaceId, ContextAwareEvent.SpaceContext.EXTENSION);
-    } catch (XyzWebClient.WebClientException e) {
-      System.out.println("Hub Error: " + e.getMessage());
-    }
-    return null;
-  }
-
-  protected static List<String> listExistingIndexes(String spaceId) throws SQLException {
-    return new SQLQuery("SELECT * FROM xyz_index_list_all_available(#{schema}, #{table});")
-        .withNamedParameter("schema", SCHEMA)
-        .withNamedParameter("table", spaceId)
-        .run(getDataSourceProvider(), rs -> {
-          List<String> result = new ArrayList<>();
-          while (rs.next())
-            result.add(rs.getString(1));
-          return result;
-        });
-  }
-
-  protected void deleteAllExistingIndexes(String spaceId) throws SQLException {
-    List<String> existingIndexes = listExistingIndexes(spaceId);
-    List<SQLQuery> dropQueries = buildSpaceTableDropIndexQueries(SCHEMA, existingIndexes);
-    SQLQuery.join(dropQueries, ";").write(getDataSourceProvider());
-  }
-
-  protected void uploadFileToS3(String s3Key, S3ContentType contentType, byte[] data, boolean gzip) throws IOException {
-    s3Client.putObject(s3Key, contentType.value, data, gzip);
-  }
-
-  protected void cleanS3Files(String s3Prefix) {
-    s3Client.deleteFolder(s3Prefix);
-  }
-
-  protected void invokeLambda(String lambdaArn, byte[] payload) {
-    lambdaClient.invoke(InvokeRequest.builder()
-        .functionName(lambdaArn)
-        .payload(SdkBytes.fromByteArray(payload))
-        .build());
-  }
-
-}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/CreateIndexStepTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/CreateIndexStepTest.java
new file mode 100644
index 0000000000..7bf96b6bbd
--- /dev/null
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/CreateIndexStepTest.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2017-2024 HERE Europe B.V.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * License-Filename: LICENSE
+ */
+
+package com.here.xyz.jobs.steps.impl;
+
+import com.here.xyz.jobs.steps.execution.LambdaBasedStep;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+
+import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION;
+import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.Index.GEO;
+import static java.lang.Thread.sleep;
+import static org.junit.Assert.assertEquals;
+
+public class CreateIndexStepTest extends StepTest {
+
+  @Test
+  public void testCreateIndex() throws Exception {
+    deleteAllExistingIndexes(SPACE_ID);
+    Assertions.assertEquals(0, listExistingIndexes(SPACE_ID).size());
+
+    LambdaBasedStep step = new CreateIndex().withSpaceId(SPACE_ID).withIndex(GEO);
+
+    sendLambdaStepRequest(step, START_EXECUTION, false);
+    //Index creation takes time
+    sleep(1000);
+
+    List<String> indexes = listExistingIndexes(SPACE_ID);
+    Assertions.assertEquals(1, indexes.size());
+    Assertions.assertEquals("idx_" + SPACE_ID + "_" + GEO.toString().toLowerCase(), indexes.get(0));
+  }
+}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/DropIndexStepTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/DropIndexStepTest.java
new file mode 100644
index 0000000000..4f4917f42b
--- /dev/null
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/DropIndexStepTest.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2017-2024 HERE Europe B.V.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * License-Filename: LICENSE
+ */
+
+package com.here.xyz.jobs.steps.impl;
+
+import com.here.xyz.jobs.steps.execution.LambdaBasedStep;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION;
+import static java.lang.Thread.sleep;
+
+public class DropIndexStepTest extends StepTest {
+
+  @Test
+  public void testDropIndexesStep() throws Exception {
+    Assertions.assertTrue(listExistingIndexes(SPACE_ID).size() > 0);
+
+    LambdaBasedStep step = new DropIndexes().withSpaceId(SPACE_ID);
+    sendLambdaStepRequest(step, START_EXECUTION, false);
+    sleep(1000);
+
+    Assertions.assertEquals(0, listExistingIndexes(SPACE_ID).size());
+  }
+}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ExportStepTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ExportStepTest.java
new file mode 100644
index 0000000000..b05da2c4fe
--- /dev/null
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ExportStepTest.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2017-2024 HERE Europe B.V.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * License-Filename: LICENSE
+ */
+
+package com.here.xyz.jobs.steps.impl;
+
+import com.here.xyz.events.PropertiesQuery;
+import com.here.xyz.jobs.datasets.filters.SpatialFilter;
+import com.here.xyz.jobs.steps.execution.LambdaBasedStep;
+import com.here.xyz.jobs.steps.impl.transport.ExportSpaceToFiles;
+import com.here.xyz.jobs.steps.outputs.DownloadUrl;
+import com.here.xyz.jobs.steps.outputs.FileStatistics;
+import com.here.xyz.jobs.steps.outputs.Output;
+import com.here.xyz.models.geojson.coordinates.PointCoordinates;
+import com.here.xyz.models.geojson.implementation.Feature;
+import com.here.xyz.models.geojson.implementation.FeatureCollection;
+import com.here.xyz.models.geojson.implementation.Point;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class ExportStepTest extends StepTest {
+  /**
+   * fcWithMixedGeometryTypes.geojson:
+   * 11 features:
+   *
+   * 3 Points
+   * 1 MultiPoint (2 Points)
+   * 2 Lines
+   * 2 MultiLine (2 Lines each)
+   * 1 Polygon with hole
+   * 1 Polygon without hole
+   * 1 MultiPolygon (2 Polygons)
+   */
+
+  @BeforeEach
+  public void setUp() throws Exception {
+    putFeatureCollectionToSpace(SPACE_ID, readTestFeatureCollection("/testFeatureCollections/fcWithMixedGeometryTypes.geojson"));
+  }
+
+  @Test
+  public void testExportSpaceToFilesStepUnfiltered() throws Exception {
+    FeatureCollection allExistingFeatures = getFeaturesFromSmallSpace(SPACE_ID, null, false);
+
+    LambdaBasedStep step = new ExportSpaceToFiles()
+        .withSpaceId(SPACE_ID)
+        .withJobId(JOB_ID);
+
+    sendLambdaStepRequest(step, LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION, false);
+    Thread.sleep(2000);
+    //TODO: switch back to simulation if test issue is fixed
+    //sendLambdaStepRequestBlock(step);
+    checkOutputs(allExistingFeatures, step.loadOutputs(true));
+  }
+
+  @Test
+  public void testExportSpaceToFilesStepWithPropertyFilter() throws Exception {
+    String propertyFilterString = "p.description=\"Point\"";
+
+    FeatureCollection allExistingFeatures = getFeaturesFromSmallSpace(SPACE_ID, propertyFilterString, false);
+
+    LambdaBasedStep step = new ExportSpaceToFiles()
+        .withPropertyFilter(PropertiesQuery.fromString(propertyFilterString))
+        .withSpaceId(SPACE_ID)
+        .withJobId(JOB_ID);
+
+    sendLambdaStepRequest(step, LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION, false);
+    Thread.sleep(2000);
+
+    //TODO: switch back to simulation if test issue is fixed
+    //sendLambdaStepRequestBlock(step);
+    checkOutputs(allExistingFeatures, step.loadOutputs(true));
+  }
+
+  @Test
+  public void testExportSpaceToFilesStepWithSpatialFilter() throws Exception {
+    String spatialFilterString = "spatial?lat=50.102964&lon=8.6709594&clip=true&radius=5500";
+
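/* [Editor's note: observation only, not part of this change]
   The programmatic filter below mirrors the REST query string above: lat/lon become the Point
   geometry, radius=5500 the search radius (in meters, presumably), and clip=true the clip flag,
   so the step output can be compared against the customReadFeaturesQuery(...) result. */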
+    SpatialFilter spatialFilter = new SpatialFilter()
+        .withGeometry(
+            new Point().withCoordinates(new PointCoordinates(8.6709594, 50.102964))
+        )
+        .withRadius(5500)
+        .withClip(true);
+
+    FeatureCollection allExistingFeatures = customReadFeaturesQuery(SPACE_ID, spatialFilterString);
+
+    LambdaBasedStep step = new ExportSpaceToFiles()
+        .withSpatialFilter(spatialFilter)
+        .withSpaceId(SPACE_ID)
+        .withJobId(JOB_ID);
+
+    sendLambdaStepRequest(step, LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION, false);
+    Thread.sleep(2000);
+    //TODO: switch back to simulation if test issue is fixed
+    //sendLambdaStepRequestBlock(step);
+    checkOutputs(allExistingFeatures, step.loadOutputs(true));
+  }
+
+  private void checkOutputs(FeatureCollection expectedFeatures, List<Output> outputs) throws IOException {
+    Assertions.assertNotEquals(0, outputs.size());
+
+    List<Feature> exportedFeatures = new ArrayList<>();
+
+    for (Object output : outputs) {
+      if (output instanceof DownloadUrl) {
+        exportedFeatures.addAll(downloadFileAndSerializeFeatures((DownloadUrl) output));
+      } else if (output instanceof FileStatistics statistics) {
+        Assertions.assertEquals(expectedFeatures.getFeatures().size(), statistics.getExportedFeatures());
+        Assertions.assertTrue(statistics.getExportedFiles() > 0);
+      }
+    }
+
+    List<String> existingFeaturesIdList = expectedFeatures.getFeatures().stream().map(Feature::getId).collect(Collectors.toList());
+    List<String> exportedFeaturesFeaturesIdList = exportedFeatures.stream().map(Feature::getId).collect(Collectors.toList());
+
+    Assertions.assertTrue(exportedFeaturesFeaturesIdList.containsAll(existingFeaturesIdList));
+  }
+}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ExportStepValidationTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ExportStepValidationTest.java
new file mode 100644
index 0000000000..f68e0d5a79
--- /dev/null
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ExportStepValidationTest.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017-2024 HERE Europe B.V.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * License-Filename: LICENSE
+ */
+
+package com.here.xyz.jobs.steps.impl;
+
+import com.here.xyz.jobs.datasets.filters.SpatialFilter;
+import com.here.xyz.jobs.steps.execution.LambdaBasedStep;
+import com.here.xyz.jobs.steps.impl.transport.ExportSpaceToFiles;
+import com.here.xyz.jobs.steps.outputs.DownloadUrl;
+import com.here.xyz.jobs.steps.outputs.FileStatistics;
+import com.here.xyz.jobs.steps.outputs.Output;
+import com.here.xyz.models.geojson.implementation.Feature;
+import com.here.xyz.models.geojson.implementation.FeatureCollection;
+import com.here.xyz.models.hub.Ref;
+import com.here.xyz.models.hub.Space;
+import com.here.xyz.util.service.BaseHttpServerVerticle.ValidationException;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class ExportStepValidationTest extends StepTest {
+
+  @BeforeEach
+  public void setup() throws SQLException {
+    cleanup();
+    createSpace(new Space().withId(SPACE_ID).withVersionsToKeep(10), false);
+    //Write three versions
+    putRandomFeatureCollectionToSpace(SPACE_ID, 2);
+    putRandomFeatureCollectionToSpace(SPACE_ID, 2);
+    putRandomFeatureCollectionToSpace(SPACE_ID, 2);
+  }
+
+  @Test
+  public void testInvalidSpatialFilter() {
+    SpatialFilter spatialFilter = new SpatialFilter()
+        .withRadius(5500)
+        .withClip(true);
+
+    LambdaBasedStep step = new ExportSpaceToFiles()
+        .withSpatialFilter(spatialFilter)
+        .withSpaceId(SPACE_ID)
+        .withJobId(JOB_ID);
+
+    //Check ExceptionType - geometry is null, which leads to a ValidationException
+    Assertions.assertThrows(ValidationException.class, () -> step.validate());
+  }
+
+  @Test
+  public void testInvalidVersionRef() {
+    LambdaBasedStep step1 = new ExportSpaceToFiles()
+        .withVersionRef(new Ref(5))
+        .withSpaceId(SPACE_ID)
+        .withJobId(JOB_ID);
+
+    //Check ExceptionType - ref version is higher than the space's max version
+    Assertions.assertThrows(ValidationException.class, () -> step1.validate());
+
+    LambdaBasedStep step2 = new ExportSpaceToFiles()
+        .withVersionRef(new Ref("1..5"))
+        .withSpaceId(SPACE_ID)
+        .withJobId(JOB_ID);
+
+    //Check ExceptionType - ref end version is higher than the space's max version
+    Assertions.assertThrows(ValidationException.class, () -> step2.validate());
+  }
+
+  private void checkOutputs(FeatureCollection expectedFeatures, List<Output> outputs) throws IOException {
+    Assertions.assertNotEquals(0, outputs.size());
+
+    List<Feature> exportedFeatures = new ArrayList<>();
+
+    for (Object output : outputs) {
+      if (output instanceof DownloadUrl) {
+        exportedFeatures.addAll(downloadFileAndSerializeFeatures((DownloadUrl) output));
+      } else if (output instanceof FileStatistics statistics) {
+        Assertions.assertEquals(expectedFeatures.getFeatures().size(), statistics.getExportedFeatures());
+        Assertions.assertTrue(statistics.getExportedFiles() > 0);
+      }
+    }
+
+    List<String> existingFeaturesIdList = expectedFeatures.getFeatures().stream().map(Feature::getId).collect(Collectors.toList());
+    List<String> exportedFeaturesFeaturesIdList = exportedFeatures.stream().map(Feature::getId).collect(Collectors.toList());
+
+    Assertions.assertTrue(exportedFeaturesFeaturesIdList.containsAll(existingFeaturesIdList));
+  }
+}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ImportStepsTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ImportStepTest.java
similarity index 79%
rename from xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ImportStepsTest.java
rename to xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ImportStepTest.java
index c4a70e6937..a21114506d 100644
--- a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ImportStepsTest.java
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/ImportStepTest.java
@@ -21,16 +21,16 @@

 import com.here.xyz.jobs.steps.execution.LambdaBasedStep;
 import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace;
+import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Format;
 import com.here.xyz.responses.StatisticsResponse;
+import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;

 import static com.here.xyz.jobs.datasets.space.UpdateStrategy.DEFAULT_UPDATE_STRATEGY;
-import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION;
-import static java.lang.Thread.sleep;
-import static org.junit.Assert.assertEquals;
-
-public class ImportStepsTest extends JobStepsTest {
+public class ImportStepTest extends StepTest {
+  private static final int FILE_COUNT = 2;
+  private static final int FEATURE_COUNT = 10;

 /**
 Test Format`s
@@ -95,18 +95,19 @@ Test Import in NonEmpty Layer (all Formats / all execution modes )

   @Test
   public void testImportFilesToSpaceStep() throws Exception {
     StatisticsResponse statsBefore = getStatistics(SPACE_ID);
-    assertEquals(0L, (Object) statsBefore.getCount().getValue());
+    Assertions.assertEquals(0L, (Object) statsBefore.getCount().getValue());

-    uploadInputFile(JOB_ID);
+    uploadFiles(JOB_ID, FILE_COUNT, FEATURE_COUNT, Format.GEOJSON);

     LambdaBasedStep step = new ImportFilesToSpace()
         .withUpdateStrategy(DEFAULT_UPDATE_STRATEGY)
         .withSpaceId(SPACE_ID);

-    sendLambdaStepRequest(step, START_EXECUTION);
-    sleep(2000);
+    sendLambdaStepRequest(step, LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION, false);
+    Thread.sleep(2000);
+    //TODO: switch back to simulation if test issue is fixed
+//    sendLambdaStepRequestBlock(step);

     StatisticsResponse statsAfter = getStatistics(SPACE_ID);
-    assertEquals(2L, (Object) statsAfter.getCount().getValue());
+    Assertions.assertEquals(Long.valueOf(FILE_COUNT * FEATURE_COUNT), statsAfter.getCount().getValue());
   }
-
 }
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/JobStepsTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/JobStepsTest.java
deleted file mode 100644
index 72148ca336..0000000000
--- a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/JobStepsTest.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (C) 2017-2024 HERE Europe B.V.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * License-Filename: LICENSE
- */
-
-package com.here.xyz.jobs.steps.impl;
-
-import static com.here.xyz.jobs.datasets.space.UpdateStrategy.DEFAULT_UPDATE_STRATEGY;
-import static com.here.xyz.jobs.steps.execution.LambdaBasedStep.LambdaStepRequest.RequestType.START_EXECUTION;
-import static com.here.xyz.jobs.steps.inputs.Input.inputS3Prefix;
-import static com.here.xyz.util.Random.randomAlpha;
-import static com.here.xyz.util.db.pg.XyzSpaceTableHelper.Index.GEO;
-import static java.lang.Thread.sleep;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import com.amazonaws.services.lambda.runtime.Context;
-import com.here.xyz.XyzSerializable;
-import com.here.xyz.jobs.steps.TestSteps;
-import com.here.xyz.jobs.steps.execution.LambdaBasedStep;
-import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace;
-import com.here.xyz.responses.StatisticsResponse;
-import com.here.xyz.util.service.aws.SimulatedContext;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-public class JobStepsTest extends TestSteps {
-  protected static final String LAMBDA_ARN = "arn:aws:lambda:us-east-1:000000000000:function:job-step";
-  protected static final String SPACE_ID = "test-space-" + randomAlpha(5);
-  protected static final String JOB_ID = "test-job-" + randomAlpha(5);
-
-  @BeforeEach
-  public void setup() {
-    cleanup();
-    createSpace(SPACE_ID);
-  }
-
-  @AfterEach
-  public void cleanup() {
-    deleteSpace(SPACE_ID);
-    cleanS3Files(JOB_ID);
-  }
-
-  @Test
-  public void testDropIndexesStep() throws Exception {
-    assertTrue(listExistingIndexes(SPACE_ID).size() > 0);
-
-    LambdaBasedStep step = new DropIndexes().withSpaceId(SPACE_ID);
-//    simulateLambdaStepRequest(step, START_EXECUTION);
-//    simulateLambdaStepRequest(step, SUCCESS_CALLBACK);
-
-    sendLambdaStepRequest(step, START_EXECUTION);
-    sleep(2000);
-
-    assertEquals(0, listExistingIndexes(SPACE_ID).size());
-  }
-
-  @Test
-  public void testCreateIndex() throws Exception {
-    deleteAllExistingIndexes(SPACE_ID);
-    assertEquals(0, listExistingIndexes(SPACE_ID).size());
-
-    LambdaBasedStep step = new CreateIndex().withSpaceId(SPACE_ID).withIndex(GEO);
-
-//    simulateLambdaStepRequest(step, START_EXECUTION);
-//    simulateLambdaStepRequest(step, SUCCESS_CALLBACK);
-
-    sendLambdaStepRequest(step, START_EXECUTION);
-    sleep(2000);
-
-    List<String> indexes = listExistingIndexes(SPACE_ID);
-    assertEquals(1, indexes.size());
-    assertEquals("idx_" + SPACE_ID + "_" + GEO.toString().toLowerCase(), indexes.get(0));
-  }
-
-  protected void simulateLambdaStepRequest(LambdaBasedStep step, LambdaBasedStep.LambdaStepRequest.RequestType requestType) throws IOException {
-    OutputStream os = new ByteArrayOutputStream();
-    Context ctx = new SimulatedContext("localLambda", null);
-
-    Map<String, Object> stepMap = step.toMap();
-    stepMap.put("taskToken.$", "test123");
-    stepMap.put("jobId", JOB_ID);
-    LambdaBasedStep enrichedStep = XyzSerializable.fromMap(stepMap, LambdaBasedStep.class);
-
-    LambdaBasedStep.LambdaStepRequest request = new LambdaBasedStep.LambdaStepRequest().withStep(enrichedStep).withType(requestType);
-    new LambdaBasedStep.LambdaBasedStepExecutor().handleRequest(new ByteArrayInputStream(request.toByteArray()), os, ctx);
-  }
-
-  protected void sendLambdaStepRequest(LambdaBasedStep step, LambdaBasedStep.LambdaStepRequest.RequestType requestType) {
-    Map<String, Object> stepMap = step.toMap();
-    stepMap.put("taskToken.$", "test123");
-    stepMap.put("jobId", JOB_ID);
-    LambdaBasedStep enrichedStep = XyzSerializable.fromMap(stepMap, LambdaBasedStep.class);
-    LambdaBasedStep.LambdaStepRequest request = new LambdaBasedStep.LambdaStepRequest().withStep(enrichedStep).withType(requestType);
-
-    invokeLambda(LAMBDA_ARN, request.toByteArray());
-  }
-
-  protected void uploadInputFile(String jobId) throws IOException {
-    String data = "{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[8,50]},\"properties\":{\"test\":1}}";
-    uploadFileToS3(inputS3Prefix(jobId) + "/" + UUID.randomUUID(), S3ContentType.APPLICATION_JSON, data.getBytes(), false);
-    String data2 = "{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[8,50]},\"properties\":{\"test\":2}}";
-    uploadFileToS3(inputS3Prefix(jobId) + "/" + UUID.randomUUID(), S3ContentType.APPLICATION_JSON, data2.getBytes(), false);
-  }
-
-}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/StepTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/StepTest.java
new file mode 100644
index 0000000000..496de53f4d
--- /dev/null
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/StepTest.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2017-2024 HERE Europe B.V.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * License-Filename: LICENSE
+ */
+
+package com.here.xyz.jobs.steps.impl;
+
+import com.here.xyz.jobs.util.test.StepTestBase;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+
+import java.sql.SQLException;
+
+public abstract class StepTest extends StepTestBase {
+
+  @BeforeEach
+  public void setup() throws SQLException {
+    cleanup();
+    createSpace(SPACE_ID);
+  }
+
+  @AfterEach
+  public void cleanup() throws SQLException {
+    deleteSpace(SPACE_ID);
+    cleanS3Files(JOB_ID);
+  }
+}
diff --git a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/transport/QuickValidatorTest.java b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/transport/QuickValidatorTest.java
index 7f71f4d09e..2ce4f8d794 100644
--- a/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/transport/QuickValidatorTest.java
+++ b/xyz-jobs/xyz-job-steps/src/test/java/com/here/xyz/jobs/steps/impl/transport/QuickValidatorTest.java
@@ -19,8 +19,8 @@

 package com.here.xyz.jobs.steps.impl.transport;

-import static com.here.xyz.jobs.steps.TestSteps.S3ContentType.APPLICATION_JSON;
-import static com.here.xyz.jobs.steps.TestSteps.S3ContentType.TEXT_CSV;
+import static com.here.xyz.jobs.util.test.StepTestBase.S3ContentType.APPLICATION_JSON;
+import static com.here.xyz.jobs.util.test.StepTestBase.S3ContentType.TEXT_CSV;
 import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.EntityPerLine.Feature;
 import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.EntityPerLine.FeatureCollection;
 import static com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Format.CSV_GEOJSON;
@@ -31,7 +31,8 @@
 import static org.junit.Assert.fail;

 import com.here.xyz.jobs.steps.Config;
-import com.here.xyz.jobs.steps.TestSteps;
+import com.here.xyz.jobs.steps.impl.transport.tools.ImportFilesQuickValidator;
+import com.here.xyz.jobs.util.test.StepTestBase;
 import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.EntityPerLine;
 import com.here.xyz.jobs.steps.impl.transport.ImportFilesToSpace.Format;
 import com.here.xyz.jobs.steps.inputs.UploadUrl;
@@ -42,7 +43,7 @@
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.ValueSource;

-public class QuickValidatorTest extends TestSteps {
+public class QuickValidatorTest extends StepTestBase {

   private static String TEST_PREFIX = "validation-test/";
diff --git a/xyz-jobs/xyz-job-steps/src/test/resources/testFeatureCollections/fcWithMixedGeometryTypes.geojson b/xyz-jobs/xyz-job-steps/src/test/resources/testFeatureCollections/fcWithMixedGeometryTypes.geojson
new file mode 100644
index 0000000000..8db4087509
--- /dev/null
+++ b/xyz-jobs/xyz-job-steps/src/test/resources/testFeatureCollections/fcWithMixedGeometryTypes.geojson
@@ -0,0 +1,331 @@
+{
+  "type": "FeatureCollection",
+  "features": [
+    {
+      "type": "Feature",
+      "fooroot": true,
+      "properties": {
+        "foo": 1,
+        "description": "Line"
+      },
+      "geometry": {
+        "type": "LineString",
+        "coordinates": [
+          [8.7011718, 50.157105],
+          [8.5926818, 50.084463],
+          [8.5446166, 50.117055]
+        ]
+      }
+    },
+    {
+      "type": "Feature",
+      "properties": {
+        "foo": 1,
+        "description": "Line"
+      },
+      "geometry": {
+        "type": "LineString",
+        "coordinates": [
+          [8.7197113, 50.087106],
+          [8.6338806, 50.040824],
+          [8.5116577, 50.056697],
+          [8.5082244, 50.025828]
+        ]
+      }
+    },
+    {
+      "type": "Feature",
+      "fooroot.nested": "bar",
+      "properties": {
"foo": 1, + "description" : "MultiLine" + }, + "geometry": { + "type": "MultiLineString", + "coordinates": [ + [ + [ + 8.6400604, + 50.258619 + ], + [ + 8.6956787, + 50.223487 + ], + [ + 8.7705230, + 50.248083 + ], + [ + 8.7890625, + 50.247424 + ] + ], + [ + [ + 8.6919021, + 50.259717 + ], + [ + 8.7035751, + 50.236885 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "foo": 1, + "description" : "Point", + "geometry" : { + "type" : "onPropertyLevel" + } + }, + "geometry": { + "type": "Point", + "coordinates": [ + 8.6709594, + 50.102964 + ] + } + }, + { + "type": "Feature", + "properties": { + "foo": 1, + "description" : "Point" + }, + "geometry": { + "type": "Point", + "coordinates": [ + 8.5995483, + 50.1293824 + ] + } + }, + { + "type": "Feature", + "properties": { + "foo": 2, + "description" : "Point" + }, + "geometry": { + "type": "Point", + "coordinates": [ + 8.5459899, + 50.063750 + ] + } + }, + { + "type": "Feature", + "properties": { + "foo": 1, + "description" : "MultiPoint" + }, + "geometry": { + "type": "MultiPoint", + "coordinates": [ + [ + 8.7828826, + 50.211185 + ], + [ + 8.8220214, + 50.227002 + ] + ] + } + }, + { + "id": "foo_polygon", + "type": "Feature", + "properties": { + "foo": 2, + "description" : "Polygon" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.5459899, + 50.020093 + ], + [ + 8.8199615, + 50.020093 + ], + [ + 8.8199615, + 50.188330 + ], + [ + 8.5459899, + 50.188330 + ], + [ + 8.5459899, + 50.020093 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "foo": 1, + "bar": null, + "description" : "Polygon with hole" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.4004211, + 50.179976 + ], + [ + 8.4704589, + 50.108249 + ], + [ + 8.5926818, + 50.131143 + ], + [ + 8.6579132, + 50.198880 + ], + [ + 8.5659027, + 50.233591 + ], + [ + 8.4574127, + 50.234030 + ], + [ + 8.4004211, + 50.179976 + ] + ], + [ + [ + 8.4725189, + 50.148306 + ], + [ + 8.4725189, + 50.211405 + ], + [ + 8.5741424, + 50.211405 + ], + [ + 8.5741424, + 50.148306 + ], + [ + 8.4725189, + 50.148306 + ] + ] + ] + } + }, + { + "type": "Feature", + "fooroot" : true, + "properties": { + "foo": 1, + "bar": "test", + "foo.nested": null, + "description" : "MultiPolygon" + }, + "geometry": { + "type": "MultiPolygon", + "coordinates": [[ + [ + [ + 8.6277008, + 50.076531 + ], + [ + 8.6496734, + 50.076531 + ], + [ + 8.6496734, + 50.091952 + ], + [ + 8.6277008, + 50.091952 + ], + [ + 8.6277008, + 50.076531 + ] + ] + ], + [[ + [ + 8.6867523, + 50.083582 + ], + [ + 8.7437438, + 50.083582 + ], + [ + 8.7437438, + 50.134664 + ], + [ + 8.6867523, + 50.134664 + ], + [ + 8.6867523, + 50.083582 + ] + ] + ]] + } + }, + { + "type": "Feature", + "properties": { + "foo": 1, + "description" : "without Geometry" + } + } + ] +} \ No newline at end of file diff --git a/xyz-models/src/main/java/com/here/xyz/events/IterateChangesetsEvent.java b/xyz-models/src/main/java/com/here/xyz/events/IterateChangesetsEvent.java index bf1513d7e4..f37c317830 100644 --- a/xyz-models/src/main/java/com/here/xyz/events/IterateChangesetsEvent.java +++ b/xyz-models/src/main/java/com/here/xyz/events/IterateChangesetsEvent.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2020 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,38 +26,36 @@ @JsonIgnoreProperties(ignoreUnknown = true) @JsonTypeName(value = "IterateChangesetsEvent") public final class IterateChangesetsEvent extends SearchForFeaturesEvent { - private String pageToken; @JsonInclude(JsonInclude.Include.NON_DEFAULT) - private Long startVersion; + private long startVersion; @JsonInclude(JsonInclude.Include.NON_DEFAULT) - private Long endVersion; + private long endVersion = -1; private int versionsToKeep; - private boolean useCollection; - public Long getStartVersion() { + public long getStartVersion() { return startVersion; } - public void setStartVersion(Long startVersion) { + public void setStartVersion(long startVersion) { this.startVersion = startVersion; } - public IterateChangesetsEvent withStartVersion(Long startVersion) { + public IterateChangesetsEvent withStartVersion(long startVersion) { setStartVersion(startVersion); return this; } - public Long getEndVersion() { + public long getEndVersion() { return endVersion; } - public void setEndVersion(Long endVersion) { + public void setEndVersion(long endVersion) { this.endVersion = endVersion; } - public IterateChangesetsEvent withEndVersion(Long endVersion) { + public IterateChangesetsEvent withEndVersion(long endVersion) { setEndVersion(endVersion); return this; } @@ -90,13 +88,4 @@ public IterateChangesetsEvent withVersionsToKeep(int setVersionsToKeep) { setVersionsToKeep(versionsToKeep); return this; } - - public boolean isUseCollection() { return useCollection; } - - public void setUseCollection(boolean useCollection) { this.useCollection = useCollection; } - - public IterateChangesetsEvent withUseCollection(boolean useCollection) { - setUseCollection(useCollection); - return this; - } } diff --git a/xyz-models/src/main/java/com/here/xyz/events/PropertiesQuery.java b/xyz-models/src/main/java/com/here/xyz/events/PropertiesQuery.java index 83b0d1b52b..bafca9118f 100644 --- a/xyz-models/src/main/java/com/here/xyz/events/PropertiesQuery.java +++ b/xyz-models/src/main/java/com/here/xyz/events/PropertiesQuery.java @@ -20,12 +20,23 @@ package com.here.xyz.events; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.stream.Stream; public class PropertiesQuery extends ArrayList<PropertyQueryList> { + private static final String F_PREFIX = "f."; + private static final Map<String, String> SEARCH_KEY_REPLACEMENTS = Map.of( + "f.id", "id", + "f.createdAt", "properties.@ns:com:here:xyz.createdAt", + "f.updatedAt", "properties.@ns:com:here:xyz.updatedAt", + "f.tags", "properties.@ns:com:here:xyz.tags" + ); public PropertiesQuery filterOutNamedProperty(String... propertyNames) { if (propertyNames == null || propertyNames.length == 0) return this; @@ -39,4 +50,143 @@ public PropertiesQuery filterOutNamedProperty(String... propertyNames) { return this; } + + public List<String> getQueryKeys() { + ArrayList<String> keyList = new ArrayList<String>(); + for (PropertyQueryList queries : this) { + for (PropertyQuery query : queries) { + keyList.add(query.getKey()); + } + } + return keyList; + } + + public static PropertiesQuery fromString(String query) { + return fromString(query, "", false); + } + + public static PropertiesQuery fromString(String query, String property, boolean spaceProperties) { + if (query == null || query.length() == 0) + return null; + + PropertyQueryList pql = new PropertyQueryList(); + Stream.of(query.split("&")) + .map(queryParam -> queryParam.startsWith("tags=") ?
transformLegacyTags(queryParam) : queryParam) + .filter(queryParam -> queryParam.startsWith("p.") || queryParam.startsWith(F_PREFIX) || spaceProperties) + .forEach(keyValuePair -> { + PropertyQuery propertyQuery = new PropertyQuery(); + + String operatorComma = "-#:comma:#-"; + try { + keyValuePair = keyValuePair.replaceAll(",", operatorComma); + keyValuePair = URLDecoder.decode(keyValuePair, "utf-8"); + } catch (UnsupportedEncodingException e) { + e.printStackTrace(); + } + + int position = 0; + String op = null; + + //Store the "main" operator. Needed for cases like foo=bar-->test + for (String shortOperator : PropertyQuery.QueryOperation.inputRepresentations()) { + int currentPositionOfOp = keyValuePair.indexOf(shortOperator); + if (currentPositionOfOp != -1) { + if ( + // feature properties query + (!spaceProperties && (op == null || currentPositionOfOp < position || ( currentPositionOfOp == position && op.length() < shortOperator.length() ))) || + // space properties query + (keyValuePair.substring(0, currentPositionOfOp).equals(property) && spaceProperties && (op == null || currentPositionOfOp < position || ( currentPositionOfOp == position && op.length() < shortOperator.length() ))) + ) { + op = shortOperator; + position = currentPositionOfOp; + } + } + } + + if (op != null) { + String[] keyVal = new String[] { + keyValuePair.substring(0, position).replaceAll(operatorComma, ","), + keyValuePair.substring(position + op.length()) + }; + //Cut off the trailing "=" appended by the API Gateway + if ((">".equals(op) || "<".equals(op)) && keyVal[1].endsWith("=")) + keyVal[1] = keyVal[1].substring(0, keyVal[1].length() - 1); + + propertyQuery.setKey(spaceProperties ? keyVal[0] : getConvertedKey(keyVal[0])); + propertyQuery.setOperation(PropertyQuery.QueryOperation.fromInputRepresentation(op)); + String[] rawValues = keyVal[1].split(operatorComma); + + ArrayList<Object> values = new ArrayList<>(); + for (String rawValue : rawValues) + values.add(getConvertedValue(rawValue)); + + propertyQuery.setValues(values); + pql.add(propertyQuery); + } + }); + + PropertiesQuery pq = new PropertiesQuery(); + pq.add(pql); + + if (pq.stream().flatMap(List::stream).mapToLong(l -> l.getValues().size()).sum() == 0) + return null; + + return pq; + } + + public static String getConvertedKey(String rawKey) { + if (rawKey.startsWith("p.")) + return rawKey.replaceFirst("p.", "properties."); + + String replacement = SEARCH_KEY_REPLACEMENTS.get(rawKey); + + //Allow root property search by using f.
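+ //E.g. "f.id" -> "id" and "f.createdAt" -> "properties.@ns:com:here:xyz.createdAt" via SEARCH_KEY_REPLACEMENTS above; + //any other key starting with "f." just gets the prefix stripped below, so "f.color" searches the root property "color".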
+ if (replacement == null && rawKey.startsWith(F_PREFIX)) + return rawKey.substring(2); + + return replacement; + } + + public static Object getConvertedValue(String rawValue) { + // Boolean + if (rawValue.equals("true")) { + return true; + } + if (rawValue.equals("false")) { + return false; + } + // Long + try { + return Long.parseLong(rawValue); + } catch (NumberFormatException ignored) { + } + // Double + try { + return Double.parseDouble(rawValue); + } catch (NumberFormatException ignored) { + } + + if (rawValue.length() > 2 && rawValue.charAt(0) == '"' && rawValue.charAt(rawValue.length() - 1) == '"') { + return rawValue.substring(1, rawValue.length() - 1); + } + + if (rawValue.length() > 2 && rawValue.charAt(0) == '\'' && rawValue.charAt(rawValue.length() - 1) == '\'') { + return rawValue.substring(1, rawValue.length() - 1); + } + + if(rawValue.equalsIgnoreCase(".null")) + return null; + + // String + return rawValue; + } + + private static String transformLegacyTags(String legacyTagsQuery) { + String[] tagQueryParts = legacyTagsQuery.split("="); + if (tagQueryParts.length != 2) + return legacyTagsQuery; + String tags = tagQueryParts[1]; + + return F_PREFIX + "tags" + "=cs=" + tags; + } } diff --git a/xyz-models/src/main/java/com/here/xyz/events/SelectiveEvent.java b/xyz-models/src/main/java/com/here/xyz/events/SelectiveEvent.java index 4b562e9db1..af99c29760 100644 --- a/xyz-models/src/main/java/com/here/xyz/events/SelectiveEvent.java +++ b/xyz-models/src/main/java/com/here/xyz/events/SelectiveEvent.java @@ -19,13 +19,15 @@ package com.here.xyz.events; +import static com.here.xyz.models.hub.Ref.HEAD; + import com.here.xyz.models.hub.Ref; import java.util.List; public class SelectiveEvent extends ContextAwareEvent { private List selection; private boolean force2D; - private Ref ref = new Ref("HEAD"); + private Ref ref = new Ref(HEAD); private long minVersion; @SuppressWarnings("unused") diff --git a/xyz-models/src/main/java/com/here/xyz/models/hub/Ref.java b/xyz-models/src/main/java/com/here/xyz/models/hub/Ref.java index 64eff14ea1..29404e02ee 100644 --- a/xyz-models/src/main/java/com/here/xyz/models/hub/Ref.java +++ b/xyz-models/src/main/java/com/here/xyz/models/hub/Ref.java @@ -28,68 +28,68 @@ public class Ref implements XyzSerializable { public static final String ALL_VERSIONS = "*"; private String tag; private long version = -1; - private long fromVersion = -1; + private long startVersion = -1; private boolean head; private boolean allVersions; @JsonCreator public Ref(String ref) { - - try { if (ref == null || ref.isEmpty() || HEAD.equals(ref)) head = true; else if (ALL_VERSIONS.equals(ref)) allVersions = true; - else if ( ref.indexOf("..") > 0 ) { - String s[] = ref.split("\\.\\."); - fromVersion = Long.parseLong(s[0]); - setVersion(Long.parseLong(s[1])); - if( fromVersion >= getToVersion() ) - throw new InvalidRef("Invalid ref: version range - fromVersion must be less then toVersion : \"" + ref + "\""); - } + else if (ref.contains("..")) + try { + String[] rangeParts = ref.split("\\.\\."); + startVersion = validateVersion(Long.parseLong(rangeParts[0])); + version = validateVersion(Long.parseLong(rangeParts[1])); + if (getStartVersion() >= getEndVersion()) + throw new InvalidRef("Invalid ref: The provided version-range is invalid. 
The start-version must be less than the end-version: " + + "\"" + ref + "\""); + } + catch (NumberFormatException e) { + throw new InvalidRef("Invalid ref: The provided version-range is invalid: \"" + ref + "\""); + } else - setVersion(Long.parseLong(ref)); - } - catch (NumberFormatException e) { - if (!Tag.isValidId(ref)) - throw new InvalidRef("Invalid ref: the provided ref is not a valid ref or version or range: \"" + ref + "\""); - - tag = ref; - } - + try { + version = validateVersion(Long.parseLong(ref)); + } + catch (NumberFormatException e) { + if (!Tag.isValidId(ref)) + throw new InvalidRef("Invalid ref: the provided ref is not a valid ref or version: \"" + ref + "\""); + + tag = ref; + } } - public Ref(long version) { - setVersion(version); + this.version = validateVersion(version); } /** - * Validates & sets the version internally(!). - * NOTE: This method should stay private to keep the immutability of this Ref model. - * @param version + * Validates a version number. + * @param version The version to validate + * @return The validated version for further usage inside an expression */ - private void setVersion(long version) { + private long validateVersion(long version) { if (version < 0) throw new InvalidRef("Invalid ref: The provided version number may not be lower than 0"); - - this.version = version; + return version; } @JsonValue @Override public String toString() { if (!isTag() && version < 0 && !head && !allVersions && !isRange()) - throw new IllegalArgumentException("Not a valid ref"); + throw new InvalidRef("Not a valid ref"); if (isTag()) return tag; if (head) return HEAD; if (allVersions) return ALL_VERSIONS; - if( isRange() ) - return String.format("%d..%d",fromVersion,version); - + if (isRange()) + return startVersion + ".." + version; return String.valueOf(version); } @@ -116,7 +116,6 @@ public String getTag() { /** * The version being referenced by this ref object. * A valid version is an integer >= 0 where 0 is the very first version of an empty space just after having been created. - * TODO: Fix DB queries accordingly to take into account the empty space as first history state */ public long getVersion() { if (!isSingleVersion()) @@ -126,11 +125,15 @@ public long getVersion() { return version; } - public long getFromVersion() { - return fromVersion; + public long getStartVersion() { + if (!isRange()) + throw new NumberFormatException("Ref does not depict a version range."); + return startVersion; } - public long getToVersion() { + public long getEndVersion() { + if (!isRange()) + throw new NumberFormatException("Ref does not depict a version range."); + return version; } @@ -147,7 +150,7 @@ public boolean isSingleVersion() { } public boolean isRange() { - return fromVersion >= 0; + return startVersion >= 0; } public static class InvalidRef extends IllegalArgumentException { diff --git a/xyz-models/src/main/java/com/here/xyz/responses/changesets/Changeset.java b/xyz-models/src/main/java/com/here/xyz/responses/changesets/Changeset.java index cf5dc4d159..bfe670582e 100644 --- a/xyz-models/src/main/java/com/here/xyz/responses/changesets/Changeset.java +++ b/xyz-models/src/main/java/com/here/xyz/responses/changesets/Changeset.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2022 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
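Aside: the reworked Ref constructor above now accepts version ranges of the form "start..end" and validates both bounds. A behavior sketch, illustrative only and based solely on the hunks above:

```java
Ref range = new Ref("5..9");
range.isRange();          // true
range.getStartVersion();  // 5
range.getEndVersion();    // 9

new Ref("9..5");          // throws Ref.InvalidRef: the start-version must be less than the end-version
```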
@@ -19,7 +19,7 @@ package com.here.xyz.responses.changesets; -import com.fasterxml.jackson.annotation.*; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.here.xyz.models.geojson.implementation.FeatureCollection; import com.here.xyz.responses.XyzResponse; @@ -30,105 +30,104 @@ */ @JsonInclude(Include.NON_DEFAULT) public class Changeset extends XyzResponse { - - long version = -1; - String author; - long createdAt; - private FeatureCollection inserted; - private FeatureCollection updated; - private FeatureCollection deleted; - private String nextPageToken; - - public long getVersion() { - return version; - } - - public void setVersion(long version) { - this.version = version; - } - - public Changeset withVersion(long version) { - setVersion(version); - return this; - } - - public String getAuthor() { - return author; - } - - public void setAuthor(String author) { - this.author = author; - } - - public Changeset withAuthor(String author) { - setAuthor(author); - return this; - } - - public long getCreatedAt() { - return createdAt; - } - - public void setCreatedAt(long createdAt) { - this.createdAt = createdAt; - } - - public Changeset withCreatedAt(long createdAt) { - setCreatedAt(createdAt); - return this; - } - - public FeatureCollection getInserted() { - return inserted; - } - - public void setInserted(FeatureCollection inserted) { - this.inserted = inserted; - } - - public Changeset withInserted(final FeatureCollection inserted) { - setInserted(inserted); - return this; - } - - public FeatureCollection getUpdated() { - return updated; - } - - public void setUpdated(FeatureCollection updated) { - this.updated = updated; - } - - public Changeset withUpdated(final FeatureCollection updated) { - setUpdated(updated); - return this; - } - - public FeatureCollection getDeleted() { - return deleted; - } - - public void setDeleted(FeatureCollection deleted) { - this.deleted = deleted; - } - - public Changeset withDeleted(final FeatureCollection deleted) { - setDeleted(deleted); - return this; - } - - @SuppressWarnings("unused") - public String getNextPageToken() { - return nextPageToken; - } - - @SuppressWarnings("WeakerAccess") - public void setNextPageToken(String nextPageToken) { - this.nextPageToken = nextPageToken; - } - - public Changeset withNextPageToken(final String nextPageToken) { - setNextPageToken(nextPageToken); - return this; - } + long version = -1; + String author; + long createdAt; + private FeatureCollection inserted; + private FeatureCollection updated; + private FeatureCollection deleted; + private String nextPageToken; + + public long getVersion() { + return version; + } + + public void setVersion(long version) { + this.version = version; + } + + public Changeset withVersion(long version) { + setVersion(version); + return this; + } + + public String getAuthor() { + return author; + } + + public void setAuthor(String author) { + this.author = author; + } + + public Changeset withAuthor(String author) { + setAuthor(author); + return this; + } + + public long getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(long createdAt) { + this.createdAt = createdAt; + } + + public Changeset withCreatedAt(long createdAt) { + setCreatedAt(createdAt); + return this; + } + + public FeatureCollection getInserted() { + return inserted; + } + + public void setInserted(FeatureCollection inserted) { + this.inserted = inserted; + } + + public Changeset withInserted(final FeatureCollection inserted) { + 
setInserted(inserted); + return this; + } + + public FeatureCollection getUpdated() { + return updated; + } + + public void setUpdated(FeatureCollection updated) { + this.updated = updated; + } + + public Changeset withUpdated(final FeatureCollection updated) { + setUpdated(updated); + return this; + } + + public FeatureCollection getDeleted() { + return deleted; + } + + public void setDeleted(FeatureCollection deleted) { + this.deleted = deleted; + } + + public Changeset withDeleted(final FeatureCollection deleted) { + setDeleted(deleted); + return this; + } + + @SuppressWarnings("unused") + public String getNextPageToken() { + return nextPageToken; + } + + @SuppressWarnings("WeakerAccess") + public void setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + } + + public Changeset withNextPageToken(final String nextPageToken) { + setNextPageToken(nextPageToken); + return this; + } } diff --git a/xyz-models/src/main/java/com/here/xyz/responses/changesets/ChangesetCollection.java b/xyz-models/src/main/java/com/here/xyz/responses/changesets/ChangesetCollection.java index 82b2b39be5..f6456b1a09 100644 --- a/xyz-models/src/main/java/com/here/xyz/responses/changesets/ChangesetCollection.java +++ b/xyz-models/src/main/java/com/here/xyz/responses/changesets/ChangesetCollection.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017-2023 HERE Europe B.V. + * Copyright (C) 2017-2024 HERE Europe B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,65 +30,65 @@ @JsonTypeName(value = "ChangesetCollection") @JsonInclude(JsonInclude.Include.NON_EMPTY) public class ChangesetCollection extends XyzResponse { - private int startVersion; - private int endVersion; - - @JsonTypeInfo( use = JsonTypeInfo.Id.NONE ) - @JsonInclude(JsonInclude.Include.ALWAYS) - private Map versions; - - private String nextPageToken; - - @SuppressWarnings("unused") - public String getNextPageToken() { - return nextPageToken; - } - - @SuppressWarnings("WeakerAccess") - public void setNextPageToken(String nextPageToken) { - this.nextPageToken = nextPageToken; - } - - public ChangesetCollection withNextPageToken(final String nextPageToken) { - setNextPageToken(nextPageToken); - return this; - } - public int getStartVersion() { - return startVersion; - } - - public void setStartVersion(int startVersion) { - this.startVersion = startVersion; - } - - public ChangesetCollection withStartVersion(final Integer startVersion) { - setStartVersion(startVersion); - return this; - } - - public int getEndVersion() { - return endVersion; - } - - public void setEndVersion(Integer endVersion) { - this.endVersion = endVersion; - } - - public ChangesetCollection withEndVersion(final Integer withEndVersion) { - setEndVersion(withEndVersion); - return this; - } - - public Map getVersions() { - return versions; - } - - public void setVersions(Map versions) { - this.versions = versions; - } - - public ChangesetCollection withVersions(final Map versions) { - setVersions(versions); - return this; - } + private long startVersion; + private long endVersion; + + @JsonTypeInfo(use = JsonTypeInfo.Id.NONE) + @JsonInclude(JsonInclude.Include.ALWAYS) + private Map versions; + + private String nextPageToken; + + @SuppressWarnings("unused") + public String getNextPageToken() { + return nextPageToken; + } + + @SuppressWarnings("WeakerAccess") + public void setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + } + + public 
ChangesetCollection withNextPageToken(final String nextPageToken) { + setNextPageToken(nextPageToken); + return this; + } + public long getStartVersion() { + return startVersion; + } + + public void setStartVersion(long startVersion) { + this.startVersion = startVersion; + } + + public ChangesetCollection withStartVersion(long startVersion) { + setStartVersion(startVersion); + return this; + } + + public long getEndVersion() { + return endVersion; + } + + public void setEndVersion(long endVersion) { + this.endVersion = endVersion; + } + + public ChangesetCollection withEndVersion(long endVersion) { + setEndVersion(endVersion); + return this; + } + + public Map getVersions() { + return versions; + } + + public void setVersions(Map versions) { + this.versions = versions; + } + + public ChangesetCollection withVersions(final Map versions) { + setVersions(versions); + return this; + } } diff --git a/xyz-psql-connector/src/main/java/com/here/xyz/psql/DatabaseHandler.java b/xyz-psql-connector/src/main/java/com/here/xyz/psql/DatabaseHandler.java index 9103c5a755..9940f7f073 100644 --- a/xyz-psql-connector/src/main/java/com/here/xyz/psql/DatabaseHandler.java +++ b/xyz-psql-connector/src/main/java/com/here/xyz/psql/DatabaseHandler.java @@ -25,11 +25,11 @@ import static com.here.xyz.psql.DatabaseWriter.ModificationType.INSERT; import static com.here.xyz.psql.DatabaseWriter.ModificationType.UPDATE; import static com.here.xyz.psql.query.XyzEventBasedQueryRunner.readTableFromEvent; +import static com.here.xyz.responses.XyzError.NOT_IMPLEMENTED; import com.fasterxml.jackson.core.JsonProcessingException; import com.here.xyz.connectors.ErrorResponseException; import com.here.xyz.connectors.StorageConnector; -import com.here.xyz.connectors.runtime.ConnectorRuntime; import com.here.xyz.events.Event; import com.here.xyz.events.GetFeaturesByIdEvent; import com.here.xyz.events.ModifyFeaturesEvent; @@ -44,17 +44,14 @@ import com.here.xyz.psql.query.helpers.versioning.GetNextVersion; import com.here.xyz.responses.ErrorResponse; import com.here.xyz.responses.XyzError; -import com.here.xyz.responses.XyzResponse; +import com.here.xyz.util.Random; import com.here.xyz.util.db.ConnectorParameters; -import com.here.xyz.util.db.DatabaseSettings; import com.here.xyz.util.db.ECPSTool; import com.here.xyz.util.db.SQLQuery; import com.here.xyz.util.db.datasource.CachedPooledDataSources; import com.here.xyz.util.db.datasource.DataSourceProvider; -import com.here.xyz.util.db.datasource.StaticDataSources; -import com.here.xyz.util.db.pg.Script; -import java.io.IOException; -import java.net.URISyntaxException; +import com.here.xyz.util.db.datasource.DatabaseSettings; +import com.here.xyz.util.runtime.FunctionRuntime; import java.sql.BatchUpdateException; import java.sql.Connection; import java.sql.SQLException; @@ -66,19 +63,20 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; public abstract class DatabaseHandler extends StorageConnector { - + /**TODO: + * shift xyz_ext.sql and h3Core.sql to their own folder and avoid double installation. + * Currently we are using common.sql in hub-service AND job-service. So we need the installation on both ends.
+ */ + private static final String SCRIPT_RESOURCE_PATH = "/sql"; public static final String ECPS_PHRASE = "ECPS_PHRASE"; private static final Logger logger = LogManager.getLogger(); private static final String MAINTENANCE_ENDPOINT = "MAINTENANCE_SERVICE_ENDPOINT"; - public static final int SCRIPT_VERSIONS_TO_KEEP = 5; - private static Map<String, List<String>> sqlScripts = new ConcurrentHashMap<>(); /** * Lambda Execution Time = 25s. We are actively canceling queries after STATEMENT_TIMEOUT_SECONDS @@ -111,45 +109,18 @@ protected void initialize(Event event) { //Decrypt the ECPS into an instance of DatabaseSettings dbSettings = new DatabaseSettings(connectorId, - ECPSTool.decryptToMap(ConnectorRuntime.getInstance().getEnvironmentVariable(ECPS_PHRASE), connectorParams.getEcps())) - .withApplicationName(ConnectorRuntime.getInstance().getApplicationName()); - - dbSettings.withSearchPath(checkScripts(dbSettings)); + ECPSTool.decryptToMap(FunctionRuntime.getInstance().getEnvironmentVariable(ECPS_PHRASE), connectorParams.getEcps())) + .withApplicationName(FunctionRuntime.getInstance().getApplicationName()) + .withScriptResourcePaths(List.of(SCRIPT_RESOURCE_PATH)); + //TODO - set scriptResourcePath if the ext & h3 functions should be installed here. dataSourceProvider = new CachedPooledDataSources(dbSettings); retryAttempted = false; dbMaintainer = new DatabaseMaintainer(dataSourceProvider, dbSettings, connectorParams, - ConnectorRuntime.getInstance().getEnvironmentVariable(MAINTENANCE_ENDPOINT)); + FunctionRuntime.getInstance().getEnvironmentVariable(MAINTENANCE_ENDPOINT)); DataSourceProvider.setDefaultProvider(dataSourceProvider); } - /** - * Checks whether the latest version of all SQL scripts is installed on the DB and returns all script schemas for the use in the - * search path. - * @return The script schema names (including the newest script version for each script) to be used in the search path - */ - private synchronized static List<String> checkScripts(DatabaseSettings dbSettings) { - String softwareVersion = ConnectorRuntime.getInstance().getSoftwareVersion(); - if (!sqlScripts.containsKey(dbSettings.getId())) { - logger.info("Checking scripts for connector {} ...", dbSettings.getId()); - try (DataSourceProvider dataSourceProvider = new StaticDataSources(dbSettings)) { - List
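With the removal of the checkScripts/sqlScripts caching above, script setup is declared on the DatabaseSettings instead. A condensed sketch of the new initialization path, using only identifiers from the hunk above; connectorId and connectorParams are assumed to be provided by the surrounding handler:

```java
dbSettings = new DatabaseSettings(connectorId,
        ECPSTool.decryptToMap(FunctionRuntime.getInstance().getEnvironmentVariable(ECPS_PHRASE),
            connectorParams.getEcps()))
    .withApplicationName(FunctionRuntime.getInstance().getApplicationName())
    //Scripts below SCRIPT_RESOURCE_PATH ("/sql") are presumably installed by the settings layer downstream
    .withScriptResourcePaths(List.of(SCRIPT_RESOURCE_PATH));
```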