diff --git a/openidm-repo-jdbc/pom.xml b/openidm-repo-jdbc/pom.xml index c5fee78a5b..a2429704d0 100644 --- a/openidm-repo-jdbc/pom.xml +++ b/openidm-repo-jdbc/pom.xml @@ -35,6 +35,7 @@ !org.testng.annotations,!sun.misc,!org.w3c.dom,javax.inject;resolution:=optional + 1.19.3 @@ -106,9 +107,9 @@ com.google.guava guava - 14.0.1 provided + com.zaxxer HikariCP @@ -123,6 +124,81 @@ test + + org.testcontainers + testcontainers + ${testcontainers.version} + test + + + + org.testcontainers + db2 + ${testcontainers.version} + test + + + + com.ibm.db2 + jcc + 11.5.9.0 + + + + org.testcontainers + mssqlserver + ${testcontainers.version} + test + + + + com.microsoft.sqlserver + mssql-jdbc + 12.4.2.jre11 + + + + org.testcontainers + oracle-xe + ${testcontainers.version} + test + + + + com.oracle.database.jdbc + ojdbc11 + 21.9.0.0 + test + + + + org.testcontainers + mysql + ${testcontainers.version} + test + + + + com.mysql + mysql-connector-j + 8.3.0 + test + + + + org.testcontainers + postgresql + ${testcontainers.version} + test + + + + org.postgresql + postgresql + 42.7.1 + test + + org.assertj assertj-core @@ -138,14 +214,14 @@ org.hsqldb hsqldb - 2.2.9 + 2.7.2 test - mysql - mysql-connector-java - 5.1.25 + org.hsqldb + sqltool + 2.7.2 test @@ -216,6 +292,34 @@ + + + org.apache.maven.plugins + maven-failsafe-plugin + + + + + run-its + + + + + maven-failsafe-plugin + + + + + integration-test + verify + + + + + + + + diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/Constants.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/Constants.java index 96510fa324..746145dedf 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/Constants.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/Constants.java @@ -12,50 +12,39 @@ * information: "Portions copyright [year] [name of copyright owner]". * * Copyright 2016 ForgeRock AS. + * Portions Copyright 2024 Wren Security. */ package org.forgerock.openidm.repo.jdbc; /** - * - * JDBC Repository Service Constants + * JDBC Repository Service constants. */ public class Constants { /** - * Raw Object Rev - * - * The DB Table column representing the Object revision + * DB Table column representing the Object revision. */ public static final String RAW_OBJECT_REV = "rev"; - + /** - * Raw Id - * - * The ID of the row representing the Object within the DB Table + * ID of the row representing the Object within the DB Table. */ public static final String RAW_ID = "id"; - + /** - * Raw Object Type Id - * - * The ObjectTypes ID of the Object within the DB Table - */ + * ObjectTypes ID of the Object within the DB Table. + */ public static final String RAW_OBJECTTYPES_ID = "objecttypes_id"; - + /** - * The Object Id - * - * The ID of the Mapped Object - */ + * ID of the Mapped Object. 
+ */ public static final String OBJECT_ID = "_id"; - + /** - * The Object Revision - * - * The Revision of the Mapped Object - */ + * Revision of the Mapped Object + */ public static final String OBJECT_REV = "_rev"; - } diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/DatabaseType.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/DatabaseType.java index ea87a0c3b6..73f63de31b 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/DatabaseType.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/DatabaseType.java @@ -24,7 +24,7 @@ package org.forgerock.openidm.repo.jdbc; /** - * @version $Revision$ $Date$ + * Supported database types. */ public enum DatabaseType { SQLSERVER, MYSQL, POSTGRESQL, ORACLE, DB2, H2, ANSI_SQL99, ODBC; diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/ErrorType.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/ErrorType.java index 4a1bafc8f8..9493c4ec76 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/ErrorType.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/ErrorType.java @@ -24,7 +24,7 @@ package org.forgerock.openidm.repo.jdbc; /** - * Portable error type identifiers + * Portable error type identifiers. */ public enum ErrorType { CONNECTION_FAILURE, diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/SQLExceptionHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/SQLExceptionHandler.java index e2b876ea9a..f218f7680c 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/SQLExceptionHandler.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/SQLExceptionHandler.java @@ -2,6 +2,7 @@ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright © 2011 ForgeRock AS. All rights reserved. + * Portions Copyright 2024 Wren Security. * * The contents of this file are subject to the terms * of the Common Development and Distribution License @@ -27,27 +28,29 @@ import java.sql.SQLException; /** - * Interface to help handle SQLExceptions across different DB implementations - * + * Interface to help handle SQLExceptions across different DB implementations. */ public interface SQLExceptionHandler { - + /** - * Query if a given exception signifies a well known error type - * + * Query if a given exception signifies a well known error type. + * + *

* Allows table handlers to abstract database specific differences in reporting errors. - * - * @param ex The exception thrown by the database + * + * @param exception the exception thrown by the database * @param errorType the error type to test against * @return true if the exception matches the error type passed */ - boolean isErrorType(SQLException ex, ErrorType errorType); - + boolean isErrorType(SQLException exception, ErrorType errorType); + /** - * As whether a given exception should be retried - * @param ex the exception thrown by the database + * As whether a given exception should be retried. + * + * @param exception the exception thrown by the database * @param connection where the failure occured, used for additional context * @return true if the expectation is that transaction should be retried by the application */ - boolean isRetryable(SQLException ex, Connection connection); + boolean isRetryable(SQLException exception, Connection connection); + } \ No newline at end of file diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/TableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/TableHandler.java index 9e6242917a..522ab53c84 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/TableHandler.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/TableHandler.java @@ -2,6 +2,7 @@ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright © 2011-2015 ForgeRock AS. All rights reserved. + * Portions Copyright 2023 Wren Security. * * The contents of this file are subject to the terms * of the Common Development and Distribution License @@ -28,190 +29,233 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; - import org.forgerock.json.JsonPointer; import org.forgerock.json.resource.BadRequestException; -import org.forgerock.json.resource.ConflictException; -import org.forgerock.json.resource.ForbiddenException; import org.forgerock.json.resource.InternalServerErrorException; import org.forgerock.json.resource.NotFoundException; import org.forgerock.json.resource.PreconditionFailedException; -import org.forgerock.json.resource.ResourceResponse; import org.forgerock.json.resource.ResourceException; +import org.forgerock.json.resource.ResourceResponse; +import org.forgerock.openidm.repo.QueryConstants; import org.forgerock.util.query.QueryFilter; +/** + * Handler responsible for performing SQL operations on the underlying data source. + * + *

There are two different strategies represented by the respective implementations: generic mapping, + * where objects are stored as a JSON blob in a main table with a separate table of searchable + * properties, and explicit (mapped) tables, where object properties are mapped to dedicated + * table columns. + *

+ */ public interface TableHandler { /** - * Gets an object from the repository by identifier. The returned object is not validated + * Get an object from the repository by its identifier. The returned object is not validated * against the current schema and may need processing to conform to an updated schema. + * *

* The object will contain metadata properties, including object identifier {@code _id}, - * and object version {@code _rev} to enable optimistic concurrency + * and object version {@code _rev} to enable optimistic concurrency. * - * @param fullId the qualified identifier of the object to retrieve from the object set. - * @param type is the qualifier of the object to retrieve + * @param fullId the qualified identifier of the object to retrieve from the object set + * @param type the qualifier of the object to retrieve * @param localId the identifier without the qualifier of the object to retrieve - * @param connection - * @throws NotFoundException if the specified object could not be found. + * @param connection database connection to use + * @throws NotFoundException if the specified object could not be found + * @throws IOException in case of JSON processing error * @throws SQLException if a DB failure was reported - * @throws IOException if a failure to convert the JSON model was reported * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure - * @return the requested object. + * @return the requested object */ - public abstract ResourceResponse read(String fullId, String type, - String localId, Connection connection) - throws SQLException, IOException, ResourceException; + ResourceResponse read(String fullId, String type, String localId, Connection connection) + throws NotFoundException, IOException, SQLException; /** - * Creates a new object in the object set. + * Create a new object in the object set. + * *

- * This method sets the {@code _id} property to the assigned identifier for the object, - * and the {@code _rev} property to the revised object version (For optimistic concurrency) - * - * @param fullId the client-generated identifier to use, or {@code null} if server-generated identifier is requested. - * @param type - * @param localId - * @param obj the contents of the object to create in the object set. - * @param connection - * @throws NotFoundException if the specified id could not be resolved. - * @throws ForbiddenException if access to the object or object set is forbidden. - * @throws PreconditionFailedException if an object with the same ID already exists. + * This method mutates the provided object by setting the {@code _id} property to the + * assigned identifier for the object and the {@code _rev} property to the revised object + * version (for optimistic concurrency). + * + * @param fullId the client-generated identifier to use, or {@code null} if server-generated + * identifier is requested + * @param type the qualifier of the object to create + * @param localId the identifier without the qualifier (if specified in {@code fullId} parameter) + * @param obj the contents of the object to create in the object set + * @param connection database connection to use + * @throws PreconditionFailedException if an object with the same ID already exists * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure - * @throws java.io.IOException - * @throws java.sql.SQLException + * @throws IOException in case of JSON processing error + * @throws SQLException if a DB failure is reported */ - public abstract void create(String fullId, String type, String localId, - Map obj, Connection connection) - throws SQLException, IOException, ResourceException; + void create(String fullId, String type, String localId, Map obj, Connection connection) + throws PreconditionFailedException, InternalServerErrorException, IOException, SQLException; /** - * Updates the specified object in the object set. + * Update the specified object in the object set. + * + *

+ * This implementation requires MVCC and hence enforces that clients state what revision they expect + * to be updating. + * *

- * This implementation requires MVCC and hence enforces that clients state what revision they expect - * to be updating - * - * If successful, this method updates metadata properties within the passed object, - * including: a new {@code _rev} value for the revised object's version - * - * @param fullId the identifier of the object to be put, or {@code null} to request a generated identifier. - * @param type - * @param localId - * @param rev the version of the object to update; or {@code null} if not provided. - * @param obj the contents of the object to put in the object set. - * @param connection - * @throws ConflictException if version is required but is {@code null}. - * @throws ForbiddenException if access to the object is forbidden. - * @throws NotFoundException if the specified object could not be found. - * @throws PreconditionFailedException if version did not match the existing object in the set. + * This method mutates the provided object by updating {@code _rev} property value for the revised + * object's version. + * + * @param fullId the identifier of the object to be updated + * @param type the qualifier of the object to update + * @param localId the identifier without the qualifier + * @param rev the version of the object to update + * @param obj the contents of the object to put in the object set + * @param connection database connection to use + * @throws NotFoundException if the specified object could not be found + * @throws PreconditionFailedException if version did not match the existing object in the set * @throws BadRequestException if the passed identifier is invalid * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure - * @throws java.io.IOException - * @throws java.sql.SQLException + * @throws IOException in case of JSON processing error + * @throws SQLException if a DB failure is reported */ - public abstract void update(String fullId, String type, String localId, - String rev, Map obj, Connection connection) - throws SQLException, IOException, ResourceException; + void update(String fullId, String type, String localId, String rev, Map obj, + Connection connection) throws NotFoundException, PreconditionFailedException, + BadRequestException, InternalServerErrorException, IOException, SQLException; /** - * Deletes the specified object from the object set. - * - * @param fullId the identifier of the object to be deleted. - * @param type - * @param localId - * @param rev the version of the object to delete or {@code null} if not provided. - * @param connection - * @throws NotFoundException if the specified object could not be found. - * @throws ForbiddenException if access to the object is forbidden. - * @throws ConflictException if version is required but is {@code null}. - * @throws PreconditionFailedException if version did not match the existing object in the set. + * Delete the specified object from the object set. 
+ * + * @param fullId the identifier of the object to be deleted + * @param type the qualifier of the object to delete + * @param localId the identifier without the qualifier + * @param rev the version of the object to delete or {@code *} to match any version + * @param connection database connection to use + * @throws NotFoundException if the specified object could not be found + * @throws PreconditionFailedException if version did not match the existing object in the set * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure - * @throws java.io.IOException - * @throws java.sql.SQLException + * @throws SQLException if a DB failure is reported */ - public abstract void delete(String fullId, String type, String localId, - String rev, Connection connection) - throws SQLException, IOException, ResourceException; + void delete(String fullId, String type, String localId, String rev, Connection connection) + throws SQLException, ResourceException; /** - * Performs the query on the specified object and returns the associated results. + * Perform a query on the specified object set and return the associated results. + * *

* Queries are parametric; a set of named parameters is provided as the query criteria. - * The query result is a JSON object structure composed of basic Java types. - * - * The returned map is structured as follow: - * - The top level map contains meta-data about the query, plus an entry with the actual result records. - * - The QueryConstants defines the map keys, including the result records (QUERY_RESULT) - * - * @param type identifies the object to query. - * @param params the parameters of the query to perform. - * @param connection - * @return the query results, which includes meta-data and the result records in JSON object structure format. - * @throws NotFoundException if the specified object could not be found. - * @throws BadRequestException if the specified params contain invalid arguments, e.g. a query id that is not - * configured, a query expression that is invalid, or missing query substitution tokens. - * @throws ForbiddenException if access to the object or specified query is forbidden. + * The query result is a JSON object structure composed of basic Java types. + * + *

+ * The query parameters map is a simple shallow map that consists of two types + * of key-value pairs: well-known query parameters (see {@link QueryConstants}), such as the query + * identifier or expression and paging/sorting options, and named substitution tokens referenced + * by the parametric query itself. + *

+ * + * @param type identifies the object type (qualifier) to query + * @param params the parameters for the query to perform + * @param connection database connection to use + * @return list of matched records in JSON object structure format + * @throws NotFoundException if the specified object could not be found + * @throws BadRequestException if the specified params contain invalid arguments, e.g. a query id that + * is not configured, a query expression that is invalid, or missing query substitution tokens * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure - * @throws java.sql.SQLException + * @throws SQLException if a DB failure is reported */ - public List> query(String type, Map params, Connection connection) + List> query(String type, Map params, Connection connection) throws SQLException, ResourceException; /** - * Performs the command on the specified target and returns the number of affected objects + * Get number of objects that match query as specified by the provided parameters. + * *

- * Commands are parametric; a set of named parameters is provided as the query criteria. - * The command returns the number of records altered/updated/deleted. + * Semantics of query parameters is the same as in {@link #query(String, Map, Connection)}. * - * @param type identifies the object to query. - * @param params the parameters of the query to perform. - * @param connection - * @return the number of records affected. - * @throws BadRequestException if the specified params contain invalid arguments, e.g. a query id that is not - * configured, a query expression that is invalid, or missing query substitution tokens. - * @throws ForbiddenException if access to the object or specified query is forbidden. + * @param type identifies the object type (qualifier) to query + * @param params the parameters for the query to perform + * @param connection database connection to use + * @return number of stored objects that match the specified query or null if the count can not + * be determined + * @throws BadRequestException if the specified params contain invalid arguments, e.g. a query id that + * is not configured, a query expression that is invalid, or missing query substitution tokens * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure - * @throws java.sql.SQLException + * @throws SQLException if a DB failure is reported */ - - public Integer command(String type, Map params, Connection connection) - throws SQLException, ResourceException; + default Integer queryCount(String type, Map params, Connection connection) + throws SQLException, ResourceException { + throw new UnsupportedOperationException(); // TODO remove default after dropping legacy handlers + } /** - * Check if a given queryId exists in our set of known queries + * Perform the command on the specified target and return the number of affected objects. * - * @param queryId Identifier for the query + *

+ * Commands are parametric; a set of named parameters is provided as the query criteria. + * The command returns the number of records altered/updated/deleted. * - * @return true if queryId is available + * @param type identifies the object set to query + * @param params the parameters of the query to perform + * @param connection database connection to use + * @return the number of records affected or {@code null} if unknown + * @throws BadRequestException if the specified params contain invalid arguments, e.g. a query id that + * is not configured, a query expression that is invalid, or missing query substitution tokens + * @throws InternalServerErrorException if the operation failed because of a (possibly transient) failure + * @throws SQLException if a DB failure is reported */ - public boolean queryIdExists(final String queryId); - + Integer command(String type, Map params, Connection connection) + throws SQLException, ResourceException; + /** - * Builds a raw query from the supplied filter. - * + * Build a raw query from the supplied filter. + * * @param filter the query filter * @param replacementTokens a map to store any replacement tokens * @param params a map containing query parameters - * @return the raw query String + * @param count whether to render a query for total number of matched rows + * @return the raw query string */ - public String renderQueryFilter(QueryFilter filter, Map replacementTokens, Map params); - + @Deprecated + default String renderQueryFilter(QueryFilter filter, Map replacementTokens, + Map params) { + throw new UnsupportedOperationException(); + } + /** - * Query if a given exception signifies a well known error type - * + * Check if a given queryId exists in our set of known queries + * + * @param queryId Identifier for the query + * @return true if queryId is available + */ + @Deprecated + default boolean queryIdExists(final String queryId) { + throw new UnsupportedOperationException(); + } + + /** + * Check if a given exception signifies a well known error type. + * + *

* Allows table handlers to abstract database specific differences in reporting errors. - * - * @param ex The exception thrown by the database + * + * @param exception the exception thrown by the database * @param errorType the error type to test against * @return true if the exception matches the error type passed */ - public boolean isErrorType(SQLException ex, ErrorType errorType); - + // XXX This is a strange method design... Wouldn't it be better to simply return ErrorType? + boolean isErrorType(SQLException exception, ErrorType errorType); + /** - * As whether a given exception should be retried - * @param ex the exception thrown by the database - * @param connection where the failure occured, used for additional context - * @return + * Determine whether a given exception can be followed up by a operation retry. + * + * @param exception the exception thrown by the database + * @param connection database connection where the failure occured (used for additional context) + * @return true if the operation that lead to the error should be retried. */ - public boolean isRetryable(SQLException ex, Connection connection); -} \ No newline at end of file + boolean isRetryable(SQLException exception, Connection connection); + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableConfig.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableConfig.java index b97ec446d4..721d1554ea 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableConfig.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableConfig.java @@ -12,6 +12,7 @@ * information: "Portions copyright [year] [name of copyright owner]". * * Copyright 2016 ForgeRock AS. + * Portions Copyright 2024 Wren Security. */ package org.forgerock.openidm.repo.jdbc.impl; @@ -22,13 +23,24 @@ * Generic table configuration. 
*/ class GenericTableConfig { - public String mainTableName; - public String propertiesTableName; - public boolean searchableDefault; - public GenericPropertiesConfig properties; - public boolean isSearchable(JsonPointer propPointer) { + public final String mainTableName; + public final String propertiesTableName; + + public final boolean searchableDefault; + + public final GenericPropertiesConfig properties; + + private GenericTableConfig(JsonValue tableConfig) { + tableConfig.required(); + mainTableName = tableConfig.get("mainTable").required().asString(); + propertiesTableName = tableConfig.get("propertiesTable").required().asString(); + searchableDefault = tableConfig.get("searchableDefault").defaultTo(Boolean.TRUE).asBoolean(); + properties = GenericPropertiesConfig.parse(tableConfig.get("properties")); + } + + boolean isSearchable(JsonPointer propPointer) { // More specific configuration takes precedence Boolean explicit = null; while (!propPointer.isEmpty() && explicit == null) { @@ -53,13 +65,7 @@ public boolean hasPossibleSearchableProperties() { } public static GenericTableConfig parse(JsonValue tableConfig) { - GenericTableConfig cfg = new GenericTableConfig(); - tableConfig.required(); - cfg.mainTableName = tableConfig.get("mainTable").required().asString(); - cfg.propertiesTableName = tableConfig.get("propertiesTable").required().asString(); - cfg.searchableDefault = tableConfig.get("searchableDefault").defaultTo(Boolean.TRUE).asBoolean(); - cfg.properties = GenericPropertiesConfig.parse(tableConfig.get("properties")); - - return cfg; + return new GenericTableConfig(tableConfig); } + } diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableHandler.java index 62100c0e7b..bbe59a6254 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableHandler.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/GenericTableHandler.java @@ -22,6 +22,8 @@ import static org.forgerock.openidm.repo.QueryConstants.SORT_KEYS; import static org.forgerock.openidm.repo.util.Clauses.where; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; @@ -33,15 +35,14 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - import org.apache.commons.lang3.StringUtils; import org.forgerock.json.JsonPointer; import org.forgerock.json.JsonValue; import org.forgerock.json.resource.InternalServerErrorException; import org.forgerock.json.resource.NotFoundException; import org.forgerock.json.resource.PreconditionFailedException; -import org.forgerock.json.resource.ResourceResponse; import org.forgerock.json.resource.ResourceException; +import org.forgerock.json.resource.ResourceResponse; import org.forgerock.json.resource.SortKey; import org.forgerock.openidm.repo.jdbc.Constants; import org.forgerock.openidm.repo.jdbc.ErrorType; @@ -52,9 +53,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; - /** * Handling of tables in a generic (not object specific) layout * @@ -82,7 +80,7 @@ public class GenericTableHandler implements TableHandler { final TypeReference> typeRef = new TypeReference>() {}; final TableQueries queries; - + final 
GenericResultSetMapper genericResultMapper = new GenericResultSetMapper(); Map queryMap; @@ -569,7 +567,7 @@ public void update(String fullId, String type, String localId, String rev, Map initializeQueryMap() { Map result = super.initializeQueryMap(); diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MSSQLTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MSSQLTableHandler.java index abd3bab6c7..d9715cfb17 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MSSQLTableHandler.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MSSQLTableHandler.java @@ -27,7 +27,6 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; - import org.forgerock.json.JsonPointer; import org.forgerock.json.JsonValue; import org.forgerock.json.resource.SortKey; @@ -79,7 +78,7 @@ protected Map initializeQueryMap() { return result; } - + /* (non-Javadoc) * @see org.forgerock.openidm.repo.jdbc.impl.TableHandler#update(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Map, java.sql.Connection) */ @@ -191,7 +190,11 @@ public String toSQL() { Clause buildNumericValueClause(String propTable, String operand, String placeholder) { return where(propTable + ".proptype = 'java.lang.Integer'") .or(propTable + ".proptype = 'java.lang.Double'") - .and("(CASE ISNUMERIC(propvalue) WHEN 1 THEN CAST(propvalue AS FLOAT) ELSE null END) " + operand + " ${" + placeholder + "}"); + .and("(" + + "CASE ISNUMERIC(" + propTable + ".propvalue) " + + "WHEN 1 THEN CAST(" + propTable + ".propvalue AS FLOAT) " + + "ELSE null END" + + ") " + operand + " ${" + placeholder + "}"); } }, replacementTokens)); @@ -207,7 +210,6 @@ Clause buildNumericValueClause(String propTable, String operand, String placehol } else { builder.orderBy("obj.id", false); } - return builder.toSQL(); } } diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MappedTableHandler.java index 99a616381a..a0a2f969a6 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MappedTableHandler.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/MappedTableHandler.java @@ -21,6 +21,7 @@ import static org.forgerock.openidm.repo.QueryConstants.PAGE_SIZE; import static org.forgerock.openidm.repo.QueryConstants.SORT_KEYS; +import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; @@ -31,7 +32,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.commons.lang3.StringUtils; import org.forgerock.audit.util.JsonValueUtils; import org.forgerock.json.JsonPointer; @@ -40,8 +40,8 @@ import org.forgerock.json.resource.InternalServerErrorException; import org.forgerock.json.resource.NotFoundException; import org.forgerock.json.resource.PreconditionFailedException; -import org.forgerock.json.resource.ResourceResponse; import org.forgerock.json.resource.ResourceException; +import org.forgerock.json.resource.ResourceResponse; import org.forgerock.json.resource.SortKey; import org.forgerock.openidm.crypto.CryptoService; import org.forgerock.openidm.repo.jdbc.Constants; @@ -57,8 +57,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.databind.ObjectMapper; - /** * Handling of tables in a 
generic (not object specific) layout */ @@ -93,8 +91,9 @@ public MappedTableHandler(String tableName, Map mapping, String Accessor cryptoServiceAccessor) throws InternalServerErrorException { // TODO Replace this with a "guarantee" somewhere when/if the provision of this accessor becomes more automatic - if (cryptoServiceAccessor == null) + if (cryptoServiceAccessor == null) { throw new InternalServerErrorException("No CryptoServiceAccessor found!"); + } this.tableName = tableName; this.dbSchemaName = dbSchemaName; @@ -423,7 +422,7 @@ public void update(String fullId, String type, String localId, String rev, @Override public void delete(String fullId, String type, String localId, String rev, Connection connection) throws PreconditionFailedException, InternalServerErrorException, NotFoundException, - SQLException, IOException { + SQLException { logger.debug("Delete with fullid {}", fullId); // First check if the revision matches and select it for UPDATE @@ -485,11 +484,13 @@ public boolean queryIdExists(String queryId) { } // TODO: make common to generic and explicit handlers + @Override public boolean isErrorType(SQLException ex, ErrorType errorType) { return sqlExceptionHandler.isErrorType(ex, errorType); } // TODO: make common to generic and explicit handlers + @Override public boolean isRetryable(SQLException ex, Connection connection) { return sqlExceptionHandler.isRetryable(ex, connection); } diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/OracleTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/OracleTableHandler.java index 79e9c0b4b6..23e6d460b6 100755 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/OracleTableHandler.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/OracleTableHandler.java @@ -36,7 +36,6 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; - import org.forgerock.json.JsonPointer; import org.forgerock.json.JsonValue; import org.forgerock.json.resource.InternalServerErrorException; @@ -61,11 +60,8 @@ public OracleTableHandler(JsonValue tableConfig, String dbSchemaName, JsonValue @Override public void create(String fullId, String type, String localId, Map obj, Connection connection) throws SQLException, IOException, InternalServerErrorException { - connection.setAutoCommit(true); long typeId = getTypeId(type, connection); - connection.setAutoCommit(false); - PreparedStatement createStatement = null; try { // Since ORACLE returns the ROWID instead of an autoincremented column, we have to tell the PreparedStatement to diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/ResultSetMapper.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/ResultSetMapper.java index e5a620e1ac..8adab6d211 100644 --- a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/ResultSetMapper.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/ResultSetMapper.java @@ -25,10 +25,9 @@ /** * Handles the conversion of ResultSets into Object set results - * */ public interface ResultSetMapper { - + List> mapToObject(ResultSet rs, String queryId, String type, Map params) throws SQLException, IOException, InternalServerErrorException; diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/SQLBuilder.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/SQLBuilder.java index 138f962e13..3a0bef5cc0 100644 --- 
a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/SQLBuilder.java +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/SQLBuilder.java @@ -2,6 +2,7 @@ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 2015 ForgeRock AS. All rights reserved. + * Portions Copyright 2024 Wren Security. * * The contents of this file are subject to the terms * of the Common Development and Distribution License @@ -25,13 +26,12 @@ import java.util.ArrayList; import java.util.List; - import org.apache.commons.lang3.StringUtils; -import org.wrensecurity.guava.common.base.Function; -import org.wrensecurity.guava.common.collect.FluentIterable; import org.forgerock.openidm.config.enhanced.InternalErrorException; import org.forgerock.openidm.repo.util.Clause; import org.forgerock.openidm.repo.util.SQLRenderer; +import org.wrensecurity.guava.common.base.Function; +import org.wrensecurity.guava.common.collect.FluentIterable; /** * An {@link org.forgerock.openidm.repo.util.SQLRenderer} that models an SQL SELECT statement and @@ -53,7 +53,7 @@ * } * */ -abstract class SQLBuilder implements SQLRenderer { +public abstract class SQLBuilder implements SQLRenderer { /** * Renders a select column. @@ -65,6 +65,7 @@ private class Column implements SQLRenderer { this.column = column; } + @Override public String toSQL() { return column; } @@ -82,6 +83,7 @@ private class Table implements SQLRenderer { this.alias = alias; } + @Override public String toSQL() { return table + (alias != null ? " " + alias : ""); } @@ -108,7 +110,7 @@ private enum JoinType implements SQLRenderer { /** * Models/renders a table join. */ - class Join implements SQLRenderer { + public class Join implements SQLRenderer { final JoinType type; final Table table; final Clause onClause; @@ -150,7 +152,7 @@ class Join implements SQLRenderer { * @param clause the on clause * @return the calling SQLBuilder with this completed join added */ - SQLBuilder on(Clause clause) { + public SQLBuilder on(Clause clause) { builder.addJoin(new Join(builder, type, table, clause)); return builder; } @@ -178,6 +180,7 @@ private class OrderBy implements SQLRenderer { this.ascending = ascending; } + @Override public String toSQL() { return order + " " + (ascending ? 
"ASC" : "DESC"); } @@ -196,7 +199,7 @@ public String toSQL() { * @param column the column to add to the select list * @return the builder */ - SQLBuilder addColumn(String column) { + public SQLBuilder addColumn(String column) { columns.add(new Column(column)); return this; } @@ -207,7 +210,7 @@ SQLBuilder addColumn(String column) { * @param table a table to select from * @return the builder */ - SQLBuilder from(String table) { + public SQLBuilder from(String table) { return from(table, null); } @@ -218,7 +221,7 @@ SQLBuilder from(String table) { * @param alias the table alias * @return the builder */ - SQLBuilder from(String table, String alias) { + public SQLBuilder from(String table, String alias) { tables.add(new Table(table, alias)); return this; } @@ -229,7 +232,7 @@ SQLBuilder from(String table, String alias) { * @param table the table to join * @return the Join */ - Join leftJoin(String table) { + public Join leftJoin(String table) { return leftJoin(table, null); } @@ -240,7 +243,7 @@ Join leftJoin(String table) { * @param alias the table's alias * @return the Join */ - Join leftJoin(String table, String alias) { + public Join leftJoin(String table, String alias) { return join(JoinType.LEFT_OUTER, table, alias); } @@ -250,7 +253,7 @@ Join leftJoin(String table, String alias) { * @param table the table to join * @return the Join */ - Join rightJoin(String table) { + public Join rightJoin(String table) { return rightJoin(table, null); } @@ -261,7 +264,7 @@ Join rightJoin(String table) { * @param alias the table's alias * @return the Join */ - Join rightJoin(String table, String alias) { + public Join rightJoin(String table, String alias) { return join(JoinType.RIGHT_OUTER, table, alias); } @@ -271,7 +274,7 @@ Join rightJoin(String table, String alias) { * @param table the table to join * @return the Join */ - Join join(String table) { + public Join join(String table) { return join(table, null); } @@ -282,7 +285,7 @@ Join join(String table) { * @param alias the table's alias * @return the Join */ - Join join(String table, String alias) { + public Join join(String table, String alias) { return join(JoinType.INNER, table, alias); } @@ -309,7 +312,7 @@ private SQLBuilder addJoin(Join join) { * @param whereClause the WhereClause * @return the builder */ - SQLBuilder where(Clause whereClause) { + public SQLBuilder where(Clause whereClause) { this.whereClause = whereClause; return this; } @@ -321,7 +324,7 @@ SQLBuilder where(Clause whereClause) { * @param ascending whether it is ascending * @return the builder */ - SQLBuilder orderBy(String orderBy, boolean ascending) { + public SQLBuilder orderBy(String orderBy, boolean ascending) { this.orderBys.add(new OrderBy(orderBy, ascending)); return this; } @@ -340,7 +343,7 @@ public String apply(SQLRenderer renderer) { * * @return a renderer for the column list */ - SQLRenderer getColumns() { + protected SQLRenderer getColumns() { return new SQLRenderer() { @Override public String toSQL() { @@ -356,7 +359,7 @@ public String toSQL() { * * @return a renderer for the from clause */ - SQLRenderer getFromClause() { + protected SQLRenderer getFromClause() { if (tables.isEmpty()) { throw new InternalErrorException("SQL query contains no tables in FROM clause"); } @@ -377,7 +380,7 @@ public String toSQL() { } }; - SQLRenderer getJoinClause() { + protected SQLRenderer getJoinClause() { if (joins.isEmpty()) { return NO_STRING; } @@ -395,7 +398,7 @@ public String toSQL() { * * @return a renderer for the where clause */ - SQLRenderer getWhereClause() { + 
protected SQLRenderer getWhereClause() { return new SQLRenderer() { @Override public String toSQL() { @@ -409,7 +412,7 @@ public String toSQL() { * * @return a renderer for the order-by clause */ - SQLRenderer getOrderByClause() { + protected SQLRenderer getOrderByClause() { if (orderBys.isEmpty()) { return NO_STRING; } @@ -428,6 +431,18 @@ public String toSQL() { @Override public abstract String toSQL(); + /** + * Render the SQL string for counting number of matched rows. + * + * @return rendered COUNT SQL string + */ + public String toCountSQL() { + return "SELECT COUNT(*) as total " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + } + /** * Return a string representation of this builder. * diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTableHandler.java new file mode 100644 index 0000000000..75f069213b --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTableHandler.java @@ -0,0 +1,178 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import static org.forgerock.openidm.repo.QueryConstants.QUERY_ID; +import static org.forgerock.openidm.repo.QueryConstants.SORT_KEYS; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.resource.BadRequestException; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.forgerock.openidm.repo.jdbc.ErrorType; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.CleanupHelper; +import org.forgerock.openidm.repo.jdbc.impl.DefaultSQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ResultMapper; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSupport; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.PreparedSql; +import org.forgerock.openidm.smartevent.EventEntry; +import org.forgerock.openidm.smartevent.Name; +import org.forgerock.openidm.smartevent.Publisher; +import org.forgerock.util.query.QueryFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Common superclass with shared logic for {@link TableHandler} implementations. + */ +public abstract class AbstractTableHandler implements TableHandler { + + /** + * Monitoring event name prefix. + */ + private static final String EVENT_RAW_QUERY_PREFIX = "openidm/internal/repo/jdbc/raw/query/"; + + protected final Logger logger = LoggerFactory.getLogger(getClass()); + + protected final ObjectMapper objectMapper = new ObjectMapper(); + + private final SQLExceptionHandler exceptionHandler; + + public AbstractTableHandler(SQLExceptionHandler exceptionHandler) { + this.exceptionHandler = exceptionHandler != null ? 
exceptionHandler : new DefaultSQLExceptionHandler(); + } + + /** + * Resolve safe SQL replacement tokens (tokens that don't have to be escaped). + * + * @return map with replacement tokens that can be safely replaced in SQL + */ + protected abstract Map resolveReplacementTokens(); + + /** + * Start smart event measure for a query defined by the given params and parsed SQL. + * + * @param params query parameters + * @param querySql parsed query SQL + * @return smart event measure + */ + protected final EventEntry startQueryMeasure(Map params, String querySql) { + String queryId = (String) params.get(QUERY_ID); + return Publisher.start(Name.get(queryId != null + ? EVENT_RAW_QUERY_PREFIX + queryId + : EVENT_RAW_QUERY_PREFIX + "_query_expression"), querySql, null); + } + + /** + * Create prepared statement for the given prepared SQL string with its parameters. This method expects SQL + * parameters to be properly casted. + * + * @param preparedSql prepared SQL string with positional + * @param connection current database connection + * @return prepared statement with parameters set + * @throws InternalServerErrorException in case of parameter type inconsistency + * @throws SQLException in case of DB failure + */ + protected final PreparedStatement createPreparedStatement(PreparedSql preparedSql, Connection connection) + throws BadRequestException, InternalServerErrorException, SQLException { + PreparedStatement preparedStatement = null; + try { + preparedStatement = connection.prepareStatement(preparedSql.getSqlString()); + NamedParameterSupport.applyStatementParams(preparedStatement, preparedSql.getParameters()); + return preparedStatement; + } catch (SQLException ex) { + CleanupHelper.loggedClose(preparedStatement); + logger.debug("DB reported failure preparing query: {} " + + " with params: {} error code: {} sqlstate: {} message: {} ", + preparedSql.getSqlString(), preparedSql.getParameters(), + ex.getErrorCode(), ex.getSQLState(), ex.getMessage(), ex); + throw new InternalServerErrorException("DB reported failure preparing query."); + } + } + + /** + * Render query filter as SQL query. + * + * @param queryFilter the query filter to render + * @param sortKeys sort keys + * @param sqlParams query parameters to be used as named parameters + * @return SQL builder instance that is able to produce the final SQL string + */ + protected abstract SQLBuilder resolveQueryFilter(QueryFilter queryFilter, + List sortKeys, Map sqlParams); + + /** + * Resolve sort keys for the query filter request making sure there is always a stable iteration + * order by including sort key for object identifier. + * + * @param params query request parameters + * @return list of sort keys + */ + @SuppressWarnings("unchecked") + protected final List resolveSortKeys(Map params) { + var sortKeys = (List) params.get(SORT_KEYS); + if (sortKeys == null) { + sortKeys = Collections.EMPTY_LIST; + } + var containsId = sortKeys.stream().anyMatch(sortKey -> { + return Constants.OBJECT_ID.equals(sortKey.getField().toString()); + }); + if (!containsId) { + sortKeys = Stream.concat(sortKeys.stream(), Stream.of(SortKey.ascendingOrder(Constants.OBJECT_ID))) + .collect(Collectors.toList()); + } + return sortKeys; + } + + /** + * Create result set mapper for the given result set meta data. 
+ * + * @param metaData result set meta data + * @return result mapper instance + * @throws SQLException in case of DB failure + */ + protected abstract ResultMapper> createResultMapper(ResultSetMetaData metaData) + throws SQLException; + + /** + * Extract normalized (lowercase) column names. + * + * @param metaData the current result set metada data + * @return result set column names + * @throws SQLException in case of SQL error + */ + protected final Collection extractColumnNames(ResultSetMetaData metaData) throws SQLException { + int columnCount = metaData.getColumnCount(); + Set columnNames = new TreeSet(); + for (int idx = 1; idx <= columnCount; idx++) { + columnNames.add(metaData.getColumnName(idx).toLowerCase()); + } + return columnNames; + } + + @Override + public final boolean isErrorType(SQLException exception, ErrorType errorType) { + return exceptionHandler.isErrorType(exception, errorType); + } + + @Override + public final boolean isRetryable(SQLException exception, Connection connection) { + return exceptionHandler.isRetryable(exception, connection); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTableConfig.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTableConfig.java new file mode 100644 index 0000000000..860d778427 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTableConfig.java @@ -0,0 +1,123 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import java.util.HashMap; +import java.util.Map; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.JsonValue; + +/** + * Parsed generic table configuration. + * + *

+ * This class corresponds to $.resourceMapping.genericMapping[*] properties of {@code JDBCRepoService}'s + * service configuration. + */ +class GenericTableConfig { + + /** + * Property value type. + */ + public enum ValueType { + + STRING, NUMBER, BOOLEAN; + + } + + /** + * Main table storing the generic object state. + */ + public final String mainTableName; + + /** + * Helper properties table used for property querying when the DB does not support JSON values. + */ + public final String propTableName; + + /** + * Whether properties should be stored in the helper table by default. + */ + public final boolean searchableDefault; + + /** + * Explicit search configuration (overrides {@link #searchableDefault}). + */ + public final Map explicitlySearchable; + + /** + * Expected property value types (used for type casting). + */ + // TODO this is a new undocumented feature (this should allow correct numeric ordering) + public final Map propertyTypes; + + /** + * Flag indicating that the configuration defines at least one searchable property. + */ + public final boolean containsSearchable; + + private GenericTableConfig(JsonValue tableConfig) { + tableConfig.required(); + + mainTableName = tableConfig.get("mainTable").required().asString(); + propTableName = tableConfig.get("propertiesTable").required().asString(); + searchableDefault = tableConfig.get("searchableDefault").defaultTo(Boolean.TRUE).asBoolean(); + + var propsConfig = tableConfig.get("properties"); + Map explicitlySearchable = new HashMap<>(); + Map propertyTypes = new HashMap<>(); + for (var propName : propsConfig.keys()) { + var propConfig = propsConfig.get(propName); + var jsonPointer = new JsonPointer(propName); + if (propConfig.isDefined("searchable")) { + explicitlySearchable.put(jsonPointer, propConfig.get("searchable").asBoolean()); + } + if (propConfig.isDefined("type")) { + propertyTypes.put(jsonPointer, ValueType.valueOf(propConfig.get("type").asString())); + } + } + this.explicitlySearchable = Map.copyOf(explicitlySearchable); + this.propertyTypes = Map.copyOf(propertyTypes); + + containsSearchable = searchableDefault || explicitlySearchable.containsValue(Boolean.TRUE); + } + + /** + * Determine if the property defined by the given pointer can be used in query filters. + * + * @param pointer property pointer + * @return {@code true} if the property can be used in query filters, {@code false} if not + */ + public boolean isSearchable(JsonPointer pointer) { + Boolean explicit = null; + while (!pointer.isEmpty() && explicit == null) { + explicit = explicitlySearchable.get(pointer); + pointer = pointer.parent(); + } + return explicit != null ? explicit : searchableDefault; + } + + /** + * Parse table configuration. + * + * @param tableConfig JSON object with table configuration + * @return parsed table configuration + */ + public static GenericTableConfig parse(JsonValue tableConfig) { + return new GenericTableConfig(tableConfig); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTableHandler.java new file mode 100644 index 0000000000..d86a3cbc47 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTableHandler.java @@ -0,0 +1,645 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). 
You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import static org.forgerock.json.resource.Responses.newResourceResponse; +import static org.forgerock.openidm.repo.util.Clauses.where; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.EnumMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.JsonValue; +import org.forgerock.json.resource.BadRequestException; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.json.resource.NotFoundException; +import org.forgerock.json.resource.PreconditionFailedException; +import org.forgerock.json.resource.ResourceException; +import org.forgerock.json.resource.ResourceResponse; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ColumnResultMapper; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ResultMapper; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ResultMappers; +import org.forgerock.openidm.repo.jdbc.impl.refactor.query.GenericSQLQueryFilterVisitor; +import org.forgerock.openidm.repo.jdbc.impl.refactor.query.TableQueryHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterCollector; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSql; +import org.forgerock.openidm.repo.util.TokenHandler; +import org.forgerock.util.query.QueryFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Generic table handler that supports objects stored as JSON string with a separate properies table used for + * indexing and querying objects by property value filters. + * + *

+ * The additional properties table is not necessary for databases that support query conditions based on JSON + * fields (e.g. PostgreSQL). Such databases have their own generic table handler implementation. + */ +public class GenericTableHandler extends AbstractTableHandler { + + /** + * Well-known implicit SQL statement types. + */ + protected enum ImplicitSqlType { + READTYPE, + CREATETYPE, + READ, + READFORUPDATE, + CREATE, + UPDATE, + DELETE, + PROPCREATE, + PROPDELETE, + QUERYALLIDS + } + + /** + * Maximum length of searchable properties. + * + *

+ * This is used to trim values due to database index size limitations. + */ + protected static final int DEFAULT_SEARCHABLE_LENGTH = 2000; + + protected final Logger logger = LoggerFactory.getLogger(getClass()); + + private final String schemaName; + + private final GenericTableConfig tableConfig; + + protected final Map implicitSql; + + protected final TableQueryHandler> queryHandler; + + private final int batchSize; + + private final GenericTypeResolver typeResolver; + + private final ResultMappers resultMappers; + + public GenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(exceptionHandler); + + this.schemaName = schemaName; + this.tableConfig = GenericTableConfig.parse(tableConfig); + + var replacementTokens = resolveReplacementTokens(); + + this.implicitSql = initializeImplicitSql().entrySet().stream() + .map(entry -> { + String resolved = new TokenHandler().replaceSomeTokens(entry.getValue(), replacementTokens); + return Map.entry(entry.getKey(), resolved); + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + this.queryHandler = new TableQueryHandler<>( + replacementTokens, + queryConfig.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> { + return NamedParameterSql.parse(entry.getValue(), replacementTokens); + })), + commandConfig.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> { + return NamedParameterSql.parse(entry.getValue(), replacementTokens); + })), + this::resolveQueryFilter, + this::createResultMapper); + + this.batchSize = batchSize > 1 ? batchSize : 0; + + this.typeResolver = new GenericTypeResolver( + implicitSql.get(ImplicitSqlType.READTYPE), + implicitSql.get(ImplicitSqlType.CREATETYPE)); + + this.resultMappers = new ResultMappers(objectMapper); + } + + @Override + protected Map resolveReplacementTokens() { + return Map.ofEntries( + Map.entry("_dbSchema", schemaName), + Map.entry("_mainTable", this.tableConfig.mainTableName), + Map.entry("_propTable", this.tableConfig.propTableName) + ); + } + + /** + * Initialize implicit SQL statements for the table handler. + * + *

+ * This method serves as extension point for vendor specific subclasses. + * + * @return mutable map with implicit SQL statements + * @see ImplicitSqlType + */ + protected Map initializeImplicitSql() { + Map result = new EnumMap<>(ImplicitSqlType.class); + + // object types table + result.put(ImplicitSqlType.CREATETYPE, + "INSERT INTO ${_dbSchema}.objecttypes (objecttype) VALUES (?)"); + result.put(ImplicitSqlType.READTYPE, + "SELECT id FROM ${_dbSchema}.objecttypes objtype " + + "WHERE objtype.objecttype = ?"); + + // main object table + result.put(ImplicitSqlType.READ, + "SELECT obj.rev, obj.fullobject " + + "FROM ${_dbSchema}.objecttypes objtype, ${_dbSchema}.${_mainTable} obj " + + "WHERE obj.objecttypes_id = objtype.id AND objtype.objecttype = ? AND obj.objectid = ?"); + result.put(ImplicitSqlType.READFORUPDATE, + "SELECT obj.* " + + "FROM ${_dbSchema}.${_mainTable} obj " + + "WHERE " + + "obj.objecttypes_id = (" + + "SELECT id FROM ${_dbSchema}.objecttypes objtype " + + "WHERE objtype.objecttype = ?" + + ") AND " + + "obj.objectid = ? " + + "FOR UPDATE"); + result.put(ImplicitSqlType.CREATE, + "INSERT INTO ${_dbSchema}.${_mainTable} (" + + "objecttypes_id, objectid, rev, fullobject" + + ") VALUES (" + + "?, ?, ?, ?" + + ")"); + result.put(ImplicitSqlType.UPDATE, + "UPDATE ${_dbSchema}.${_mainTable} " + + "SET " + + "objectid = ?, " + + "rev = ?, " + + "fullobject = ? " + + "WHERE id = ?"); + result.put(ImplicitSqlType.DELETE, + "DELETE FROM ${_dbSchema}.${_mainTable} " + + "WHERE " + + "EXISTS (" + + "SELECT 1 FROM ${_dbSchema}.objecttypes objtype " + + "WHERE " + + "objtype.id = ${_mainTable}.objecttypes_id AND " + + "objtype.objecttype = ?" + + ") AND " + + "objectid = ? AND " + + "rev = ?"); + + // indexed properties table + result.put(ImplicitSqlType.PROPCREATE, + "INSERT INTO ${_dbSchema}.${_propTable} (" + + "${_mainTable}_id, propkey, proptype, propvalue" + + ") VALUES (" + +"?, ?, ?, ?" + + ")"); + result.put(ImplicitSqlType.PROPDELETE, + "DELETE FROM ${_dbSchema}.${_propTable} WHERE ${_mainTable}_id = ?"); + + // default object queries + result.put(ImplicitSqlType.QUERYALLIDS, "SELECT obj.objectid FROM ${_dbSchema}.${_mainTable} obj " + + "INNER JOIN ${_dbSchema}.objecttypes objtype ON " + + "obj.objecttypes_id = objtype.id " + + "WHERE objtype.objecttype = ${_resource}"); + + return result; + } + + /** + * Resolve implicit SQL statement. + * + * @param type statement type + * @param keys whether to return generated keys + * @param connection current database connection + * @return resolved prepared statement + * @throws SQLException in case of DB failure + */ + protected PreparedStatement resolveImplicitStatement(ImplicitSqlType type, boolean keys, Connection connection) + throws SQLException { + return keys + ? 
connection.prepareStatement(implicitSql.get(type), Statement.RETURN_GENERATED_KEYS) + : connection.prepareStatement(implicitSql.get(type)); + } + + @Override + public ResourceResponse read(String fullId, String type, String localId, Connection connection) + throws NotFoundException, IOException, SQLException { + List> results = new ArrayList<>(); + + try (var readStatement = resolveImplicitStatement(ImplicitSqlType.READ, false, connection)) { + logger.trace("Populating prepared statement {} for {}", readStatement, fullId); + readStatement.setString(1, type); + readStatement.setString(2, localId); + + logger.debug("Executing: {}", readStatement); + try (var resultSet = readStatement.executeQuery()) { + var resultMapper = resultMappers.forFullObject(); + while (resultSet.next()) { + results.add(resultMapper.map(resultSet)); + } + } + } + + if (results.isEmpty()) { + throw new NotFoundException("Object " + fullId + " not found in " + type); + } + var result = results.get(0); + String revision = (String) result.get(Constants.OBJECT_REV); + logger.debug(" full id: {}, rev: {}, obj {}", fullId, revision, result); + + return newResourceResponse(localId, revision, new JsonValue(result)); + } + + @Override + public void create(String fullId, String type, String localId, Map obj, Connection connection) + throws PreconditionFailedException, InternalServerErrorException, IOException, SQLException { + logger.debug("Create with fullid {}", fullId); + + long typeId = typeResolver.resolveTypeId(type, connection); + String revision = "0"; + + // update object properties + obj.put(Constants.OBJECT_ID, localId); + obj.put(Constants.OBJECT_REV, revision); + + // serialize full object state + String fullObject = objectMapper.writeValueAsString(obj); + + try (var createStatement = resolveImplicitStatement(ImplicitSqlType.CREATE, true, connection)) { + logger.trace("Populating statement {} with params {}, {}, {}, {}", + createStatement, typeId, localId, revision, fullObject); + createStatement.setLong(1, typeId); + createStatement.setString(2, localId); + createStatement.setString(3, revision); + createStatement.setString(4, fullObject); + + logger.debug("Executing: {}", createStatement); + createStatement.executeUpdate(); + + long databaseId; + try (var generatedKeys = createStatement.getGeneratedKeys()) { + if (!generatedKeys.next()) { + throw new InternalServerErrorException("Object creation for " + fullId + + " failed to retrieve an assigned ID from the DB."); + } + databaseId = generatedKeys.getLong(1); + } + + logger.debug("Created object for id {} with rev {}", fullId, revision); + writeValueProperties(fullId, databaseId, new JsonValue(obj), connection); + } + } + + /** + * Write properties of a given resource to the properties table and links them to the main table record. 
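+ *
+ * <p>
+ * As a rough illustration (the property name is hypothetical), an object with a searchable {@code /mail}
+ * property results in a properties table row holding the pointer key {@code "/mail"}, the value class name
+ * {@code "java.lang.String"} and the value itself, trimmed to {@link #getSearchableLength()} characters.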
+ * + * @param fullId the qualified identifier of the owner object + * @param databaseId the generated identifier to link the properties table with the main table (foreign key) + * @param value the JSON value with the properties to write + * @param connection the DB connection + * @throws SQLException if the insert failed + */ + protected void writeValueProperties(String fullId, long databaseId, JsonValue value, Connection connection) + throws SQLException { + if (!tableConfig.containsSearchable) { + return; // no searchable properties, no need to index + } + + Map pairs = new LinkedHashMap(); + extractValueProperties(value, pairs::put); + + try (var createStatement = resolveImplicitStatement(ImplicitSqlType.PROPCREATE, false, connection)) { + int batchingCount = 0; + + for (var pair : pairs.entrySet()) { + // prepare index properties + var object = pair.getValue(); + var idxkey = pair.getKey().toString(); + var idxtype = object != null ? object.getClass().getName() : null; + var idxvalue = object != null ? StringUtils.left(object.toString(), getSearchableLength()) : null; + + // set statement parameters + if (logger.isTraceEnabled()) { + logger.trace("Populating statement {} with params {}, {}, {}, {}", + createStatement, databaseId, idxkey, idxtype, idxvalue); + } + createStatement.setLong(1, databaseId); + createStatement.setString(2, idxkey); + createStatement.setString(3, idxtype); + createStatement.setString(4, idxvalue); + + // handle statement execution + if (batchSize > 0) { + createStatement.addBatch(); + if (++batchingCount >= batchSize) { + int[] updates = createStatement.executeBatch(); + if (logger.isDebugEnabled()) { + logger.debug("Batch limit reached, update of objectproperties updated: {}", + Arrays.asList(updates)); + } + createStatement.clearBatch(); + batchingCount = 0; + } + } else { + createStatement.executeUpdate(); + } + } + + if (batchingCount > 0) { + int[] updates = createStatement.executeBatch(); + if (logger.isDebugEnabled()) { + logger.debug("Writing batch of objectproperties, updated: {}", Arrays.asList(updates)); + } + } + } + } + + /** + * Recursive function to extract searchable property values that should be indexed. + * + * @param value JSON value (array or object) + * @param collector callback for collecting extracted property values + */ + private void extractValueProperties(JsonValue json, BiConsumer collector) { + for (JsonValue entry : json) { + JsonPointer pointer = entry.getPointer(); + if (!tableConfig.isSearchable(pointer)) { + continue; + } + if (entry.isMap() || entry.isList()) { + extractValueProperties(entry, collector); + continue; + } + collector.accept(pointer, entry.getObject()); + } + } + + /** + * Remove properties of a resource stored under the specified database identifier from the properties table. 
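+ *
+ * <p>
+ * During an update this is invoked right before {@link #writeValueProperties}, so the indexed property
+ * rows are rebuilt to match the new object state; plain deletes instead rely on {@code ON DELETE CASCADE}.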
+ * + * @param databaseId the identifier that link the properties table with the main table (foreign key) + * @param fullId the qualified identifier of the owner object + * @param connection the DB connection + * @throws SQLException if the insert failed + */ + protected void clearValueProperties(String fullId, long databaseId, Connection connection) throws SQLException { + try (var deleteStatement = resolveImplicitStatement(ImplicitSqlType.PROPDELETE, false, connection)) { + logger.trace("Populating prepared statement {} with {}", deleteStatement, databaseId); + deleteStatement.setLong(1, databaseId); + int deleteCount = deleteStatement.executeUpdate(); + logger.trace("Deleted child rows: {} for: {}", deleteCount, fullId); + } + } + + /** + * Read an object with FOR UPDATE lock applied. + * + * @param fullId qualified id of component type and id + * @param type the qualifier of the object to retrieve + * @param localId the identifier without the qualifier of the object to retrieve + * @param connection database connection to use + * @return the row as a map of column name/value pairs for the requested object + * @throws NotFoundException if the requested object was not found in the DB + * @throws java.sql.SQLException for general DB issues + */ + protected Map readForUpdate(String fullId, String type, String localId, Connection connection) + throws NotFoundException, SQLException { + try (var readStatement = resolveImplicitStatement(ImplicitSqlType.READFORUPDATE, false, connection)) { + logger.trace("Populating prepared statement {} for {}", readStatement, fullId); + readStatement.setString(1, type); + readStatement.setString(2, localId); + + logger.debug("Executing: {}", readStatement); + try (var resultSet = readStatement.executeQuery()) { + if (!resultSet.next()) { + throw new NotFoundException("Object " + fullId + " not found in " + type); + } + return new ColumnResultMapper(resultSet.getMetaData()).map(resultSet); + } + } + } + + @Override + public void update(String fullId, String type, String localId, String rev, Map obj, + Connection connection) throws NotFoundException, PreconditionFailedException, BadRequestException, + InternalServerErrorException, IOException, SQLException { + logger.debug("Update with fullid {}", fullId); + + // read existing object state + JsonValue existingObj = new JsonValue(readForUpdate(fullId, type, localId, connection)); + String existingRev = existingObj.get(Constants.RAW_OBJECT_REV).asString(); + long databaseId = existingObj.get(Constants.RAW_ID).asLong(); + long typeId = existingObj.get("objecttypes_id").asLong(); + logger.debug("Update existing object {} rev: {} db id: {}, object type db id: {}", fullId, existingRev, + databaseId, typeId); + + // perform optimistic version locking + if (!existingRev.equals(rev)) { + throw new PreconditionFailedException("Update rejected as current Object revision " + existingRev + + " is different than expected by caller (" + rev + "), the object has changed since retrieval."); + } + + // support changing object identifier + String updatedId = (String) obj.get(Constants.OBJECT_ID); + if (updatedId != null && !updatedId.equals(localId)) { + logger.debug("Object identifier is changing from " + localId + " to " + updatedId); + } else { + updatedId = localId; // if it hasn't changed, use the existing ID + obj.put(Constants.OBJECT_ID, updatedId); // ensure the ID is saved in the object + } + + // handle revision update + String updatedRev = Integer.toString(Integer.parseInt(rev) + 1); + obj.put(Constants.OBJECT_REV, 
updatedRev); + + // serialize full object state + String fullObject = objectMapper.writeValueAsString(obj); + + try (var updateStatement = resolveImplicitStatement(ImplicitSqlType.UPDATE, false, connection)) { + logger.trace("Populating prepared statement {} with {} {} {} {} {}", updateStatement, fullId, updatedId, + updatedRev, fullObject, databaseId); + updateStatement.setString(1, updatedId); + updateStatement.setString(2, updatedRev); + updateStatement.setString(3, fullObject); + updateStatement.setLong(4, databaseId); + logger.debug("Update statement: {}", updateStatement); + + int updateCount = updateStatement.executeUpdate(); + logger.trace("Updated rows: {} for {}", updateCount, fullId); + if (updateCount != 1) { + throw new InternalServerErrorException("Update execution did not result in updating 1 " + + "row as expected. Updated rows: " + updateCount); + } + } + + clearValueProperties(fullId, databaseId, connection); + writeValueProperties(fullId, databaseId, new JsonValue(obj), connection); + } + + @Override + public void delete(String fullId, String type, String localId, String rev, Connection connection) + throws SQLException, ResourceException { + logger.debug("Delete with fullid {}", fullId); + + // read existing object state + JsonValue existingObj = new JsonValue(readForUpdate(fullId, type, localId, connection)); + String existingRev = existingObj.get(Constants.RAW_OBJECT_REV).asString(); + + // perform optimistic version locking + if (!"*".equals(existingRev) && !existingRev.equals(rev)) { + throw new PreconditionFailedException("Delete rejected as current Object revision " + existingRev + + " is different than the expected by caller " + rev + ", the object has changed since retrieval."); + } + + // rely on ON DELETE CASCADE for connected object properties to be deleted + try (var deleteStatement = resolveImplicitStatement(ImplicitSqlType.DELETE, false, connection)) { + logger.trace("Populating prepared statement {} for {} {} {} {}", deleteStatement, fullId, type, localId, rev); + deleteStatement.setString(1, type); + deleteStatement.setString(2, localId); + deleteStatement.setString(3, rev); + logger.debug("Delete statement: {}", deleteStatement); + + int deletedRows = deleteStatement.executeUpdate(); + logger.trace("Deleted {} rows for id : {} {}", deletedRows, localId); + if (deletedRows < 1) { + throw new InternalServerErrorException("Deleting object for " + fullId + " failed, DB reported " + + deletedRows + " rows deleted"); + } else { + logger.debug("Delete for id succeeded: {} revision: {}", localId, rev); + } + } + } + + @Override + public List> query(String type, Map params, Connection connection) + throws SQLException, ResourceException { + return queryHandler.query(type, params, connection); + } + + @Override + protected ResultMapper> createResultMapper(ResultSetMetaData metaData) throws SQLException { + Collection columnNames = extractColumnNames(metaData); + if (columnNames.contains(ResultMappers.OBJECT_COLUMN)) { + return resultMappers.forFullObject(); + } else if (columnNames.contains(ResultMappers.TOTAL_COLUMN)) { + return resultMappers.forTotalCount(); + } else { + return resultMappers.forObjectRef(columnNames.contains(Constants.RAW_OBJECT_REV)); + } + } + + @Override + protected SQLBuilder resolveQueryFilter(QueryFilter queryFilter, List sortKeys, + Map sqlParams) { + var builder = createSqlBuilder(); + + var collector = new NamedParameterCollector(sqlParams); + + var visitor = createFilterVisitor(builder); + builder.addColumn("obj.*") + 
.from("${_dbSchema}.${_mainTable}", "obj") + .join("${_dbSchema}.objecttypes", "objecttypes") + .on(where("obj.objecttypes_id = objecttypes.id") + .and("objecttypes.objecttype = ${_resource}")) + .where(queryFilter.accept(visitor, collector)); + + if (sortKeys != null) { + for (var sortKey : sortKeys) { + var tokenName = collector.register("s", sortKey.getField().toString()); + var joinAlias = collector.generate("o"); + builder.join("${_dbSchema}.${_propTable}", joinAlias) + .on(where(joinAlias + ".${_mainTable}_id = obj.id") + .and(joinAlias + ".propkey = ${" + tokenName + "}")) + .orderBy(joinAlias + ".propvalue", sortKey.isAscendingOrder()); + } + } + + return builder; + } + + /** + * Create new {@link SQLBuilder} to render query filter queries. + * + * @return new SQLBuilder instance + */ + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + return "SELECT " + getColumns().toSQL() + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL() + + getOrderByClause().toSQL() + + " LIMIT ${int:_pageSize} " + + " OFFSET ${int:_pagedResultsOffset}"; + } + }; + } + + /** + * Get the maximum allowed length of searchable field value. This depends on maximum allowed length + * of indexable value in the database engine. + * + * @return the maximum length of searchable (indexable) value + */ + protected int getSearchableLength() { + return DEFAULT_SEARCHABLE_LENGTH; + } + + /** + * Create new {@link GenericSQLQueryFilterVisitor} to render query filter queries. + * + * @param builder SQL builder instance + * @return new GenericSQLQueryFilterVisitor instance + */ + protected GenericSQLQueryFilterVisitor createFilterVisitor(SQLBuilder builder) { + return new GenericSQLQueryFilterVisitor(getSearchableLength(), builder); + } + + @Override + public Integer queryCount(String type, Map params, Connection connection) + throws SQLException, ResourceException { + return queryHandler.queryCount(type, params, connection); + } + + @Override + public Integer command(String type, Map params, Connection connection) + throws SQLException, ResourceException { + return queryHandler.command(type, params, connection); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTypeResolver.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTypeResolver.java new file mode 100644 index 0000000000..1c4351d264 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/GenericTypeResolver.java @@ -0,0 +1,125 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". 
+ * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import java.sql.Connection; +import java.sql.SQLException; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Generic object type resolver that maps type names to numeric identifiers stored in objecttypes table. + * + *
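+ * <p>
+ * A minimal usage sketch (the resource type name is illustrative):
+ * <pre>{@code
+ *   long typeId = typeResolver.resolveTypeId("managed/user", connection);
+ * }</pre>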

+ * This class simply serves as a place to offload type handling logic from {@link GenericTableHandler} to keep it + * more simple. + */ +public class GenericTypeResolver { + + private final Logger logger = LoggerFactory.getLogger(getClass()); + + private final String readSql; + + private final String createSql; + + /** + * Create new type resolver using the given CREATE and READ SQL strings. + * @param readSql read SQL string for type ID with one type name parameter + * @param createSql create SQL string with one type name parameter + */ + public GenericTypeResolver(String readSql, String createSql) { + this.readSql = readSql; + this.createSql = createSql; + } + + /** + * Resolve type name to the stored type identifier. + * + *

+ * Method will automatically register missing types when requested. Callers should note that this + * may commit a transaction and start a new one if a new type gets added. + * + * @param type object type name + * @param connection current database connection + * @return resolved type identifier or {@code -1} if unable to resolve + * @throws InternalServerErrorException in case the automatic registration fails + * @throws SQLException in case of SQL error + */ + public long resolveTypeId(String type, Connection connection) + throws InternalServerErrorException, SQLException { + long typeId = readTypeId(type, connection); + if (typeId >= 0) { + return typeId; + } + + Exception detectedEx = null; + try { + createTypeId(type, connection); + } catch (SQLException e) { + // ignore exception as it may have been caused by duplicate key violation + detectedEx = e; + } + + typeId = readTypeId(type, connection); + if (typeId < 0) { + throw new InternalServerErrorException( + "Failed to populate and look up objecttypes table, no id could be retrieved for " + type, + detectedEx); + } + return typeId; + } + + /** + * Resolve type name to the stored type identifier. + * + * @param type object type name + * @param connection current database connection + * @return resolved type identifier or {@code -1} if unable to resolve + * @throws SQLException in case of SQL error + */ + public long readTypeId(String type, Connection connection) throws SQLException { + try (var readStatement = connection.prepareStatement(readSql)) { + logger.trace("Populating prepared statement {} for {}", readSql, type); + readStatement.setString(1, type); + + logger.debug("Executing: {}", readStatement); + try (var resultSet = readStatement.executeQuery()) { + if (resultSet.next()) { + var typeId = resultSet.getLong(Constants.RAW_ID); + logger.debug("Type: {}, id: {}", type, typeId); + return typeId; + } + } + } + return -1; + } + + private void createTypeId(String type, Connection connection) throws SQLException { + // commit the new type right away, and have no transaction isolation for read + connection.setAutoCommit(true); + try (var createStatement = connection.prepareStatement(createSql)) { + logger.debug("Create objecttype {}", type); + createStatement.setString(1, type); + logger.debug("Executing: {}", createStatement); + createStatement.executeUpdate(); + } finally { + connection.setAutoCommit(false); + } + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedColumnConfig.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedColumnConfig.java new file mode 100644 index 0000000000..5886d4b4b7 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedColumnConfig.java @@ -0,0 +1,141 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. 
If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2016 ForgeRock AS. + * Portions Copyright 2024 Wren Security. + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import org.forgerock.json.JsonPointer; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.config.enhanced.InvalidException; + +/** + * Parsed table column mapping configuration. + * + *
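+ * <p>
+ * For illustration only (property and column names are hypothetical), a mapping may combine the
+ * supported plain, list and map forms:
+ * <pre>
+ *   "objectToColumn": {
+ *     "_id": "objectid",
+ *     "_rev": "rev",
+ *     "roles": ["roles", "JSON_LIST"]
+ *   }
+ * </pre>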

+ * This class corresponds to $.resourceMapping.explicitMapping[*].objectToColumn properties of + * {@code JDBCRepoService}'s service configuration. + */ +public class MappedColumnConfig { + + /** + * Property value type. + */ + public enum ValueType { + + STRING, NUMBER, BOOLEAN, JSON_MAP, JSON_LIST; + + } + + public static final String COLUMN_NAME = "column"; + public static final String VALUE_TYPE = "valueType"; + + public final JsonPointer propertyName; + public final ValueType valueType; + public final String columnName; + + public MappedColumnConfig(JsonPointer propertyName, String columnName, ValueType valueType) { + this.propertyName = propertyName; + this.columnName = columnName; + this.valueType = valueType; + } + + /** + * Determine whether the column holds compound JSON value. + * + * @return true if the value is compound JSON value (basically object or array). + */ + public boolean isJson() { + return valueType == ValueType.JSON_LIST || valueType == ValueType.JSON_MAP; + } + + /** + * Parse column mapping configuration. + * + * @param name property name (JSON pointer) + * @param columnConfig JSON object with column configuration. + * @return parsed column configuration + */ + public static MappedColumnConfig parse(String name, JsonValue columnConfig) { + if (columnConfig.isList()) { + return parseList(name, columnConfig); + } else if (columnConfig.isMap()) { + return parseMap(name, columnConfig); + } else { + return new MappedColumnConfig( + new JsonPointer(name), + columnConfig.required().asString(), + ValueType.STRING); + } + } + + /** + * Parse list based column configuration. + * + * Definition: + * + *

+     *   "propertyPointer": ["columnName", "valueType"],
+     * 
+ * + * Example: + * + *
+     *   "foo": ["foo", "STRING"]
+     * 
+ */ + private static MappedColumnConfig parseList(String name, JsonValue columnConfig) { + int size = columnConfig.asList().size(); + if (size < 2 || size > 3) { + throw new InvalidException("Explicit table mapping has invalid entry for " + + name + ", expecting [column name, value type, stored type] but contains " + + columnConfig.asList()); + } + return new MappedColumnConfig( + new JsonPointer(name), + columnConfig.get(0).required().asString(), + size > 1 ? ValueType.valueOf(columnConfig.get(1).asString()) : ValueType.STRING); + } + + /** + * Parse map based column configuration. + * + * Definition: + * + *
+     *   "propertyPointer": {
+     *     "type": "VALUE_TYPE",
+     *   },
+     * 
+ * + * Example: + * + *
+     *   "foo": {
+     *     "type": "NUMBER"
+     *   }
+     * 
+ */ + private static MappedColumnConfig parseMap(String name, JsonValue columnConfig) { + String valueType = columnConfig.get("type").asString(); // short name + if (columnConfig.isDefined(VALUE_TYPE)) { + valueType = columnConfig.get(VALUE_TYPE).asString(); + } + return new MappedColumnConfig( + new JsonPointer(name), + columnConfig.get(COLUMN_NAME).required().asString(), + valueType != null ? ValueType.valueOf(valueType) : ValueType.STRING); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedConfigResolver.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedConfigResolver.java new file mode 100644 index 0000000000..60d87fdaa6 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedConfigResolver.java @@ -0,0 +1,20 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import org.forgerock.json.JsonPointer; + +/** + * Property pointer to mapped table column configuration resolver. + */ +@FunctionalInterface +public interface MappedConfigResolver { + + /** + * Resolve column configuration for the given JSON property pointer. + * + * @param property property pointer + * @return mapped table column configuration + * @throws IllegalArgumentException when there is no column configuration for the given pointer + */ + MappedColumnConfig resolve(JsonPointer property); + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedTableHandler.java new file mode 100644 index 0000000000..2c8e609c08 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/MappedTableHandler.java @@ -0,0 +1,525 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". 
+ * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import static java.util.function.Function.identity; +import static org.forgerock.json.resource.Responses.newResourceResponse; + +import com.fasterxml.jackson.core.JsonProcessingException; +import java.io.IOException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Collection; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.forgerock.audit.util.JsonValueUtils; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.JsonValue; +import org.forgerock.json.resource.BadRequestException; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.json.resource.NotFoundException; +import org.forgerock.json.resource.PreconditionFailedException; +import org.forgerock.json.resource.ResourceException; +import org.forgerock.json.resource.ResourceResponse; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.MappedResultMapper; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ResultMapper; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ResultMappers; +import org.forgerock.openidm.repo.jdbc.impl.refactor.query.MappedSQLQueryFilterVisitor; +import org.forgerock.openidm.repo.jdbc.impl.refactor.query.TableQueryHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterCollector; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSql; +import org.forgerock.openidm.repo.util.Clauses; +import org.forgerock.openidm.repo.util.TokenHandler; +import org.forgerock.util.query.QueryFilter; + +/** + * Mapped table handler that supports objects stored in a dedicated table that maps object properties + * to column values. + */ +public class MappedTableHandler extends AbstractTableHandler { + + /** + * Well-known implicit SQL statement types. 
+ */ + protected enum ImplicitSqlType { + READ, + READFORUPDATE, + CREATE, + UPDATE, + DELETE + } + + protected final String schemaName; + + protected final String tableName; + + protected final Map columnMapping; + + private final Map implicitSql; + + private final TableQueryHandler> queryHandler; + + private final ResultMappers resultMappers; + + public MappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(exceptionHandler); + + this.schemaName = schemaName; + this.tableName = tableName; + + this.columnMapping = columnMapping.keys().stream() + .collect(Collectors.toMap(identity(), name -> { + return MappedColumnConfig.parse(name, columnMapping.get(name)); + })); + + var replacementTokens = resolveReplacementTokens(); + + this.implicitSql = initializeImplicitSql().entrySet().stream() + .map(entry -> { + String resolved = new TokenHandler().replaceSomeTokens(entry.getValue(), replacementTokens); + return Map.entry(entry.getKey(), resolved); + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + logger.debug("Prepared implicit SQL strings {}", implicitSql); + + this.queryHandler = new TableQueryHandler<>( + replacementTokens, + queryConfig.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> { + return NamedParameterSql.parse(entry.getValue(), replacementTokens); + })), + commandConfig.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> { + return NamedParameterSql.parse(entry.getValue(), replacementTokens); + })), + this::resolveQueryFilter, + this::createResultMapper); + + this.resultMappers = new ResultMappers(objectMapper); + } + + @Override + protected Map resolveReplacementTokens() { + return Map.ofEntries( + Map.entry("_dbSchema", schemaName), + Map.entry("_table", tableName) + ); + } + + /** + * Initialize implicit SQL statements for the table handler. + * + *

+ * This method serves as extension point for vendor specific subclasses. + * + * @return mutable map with implicit SQL statements + * @see ImplicitSqlType + */ + protected Map initializeImplicitSql() { + Map result = new EnumMap<>(ImplicitSqlType.class); + result.put(ImplicitSqlType.READ, + "SELECT * FROM ${_dbSchema}.${_table} WHERE objectid = ?"); + result.put(ImplicitSqlType.READFORUPDATE, + "SELECT * FROM ${_dbSchema}.${_table} WHERE objectid = ? FOR UPDATE"); + result.put(ImplicitSqlType.CREATE, + "INSERT INTO ${_dbSchema}.${_table} (" + + columnMapping.values().stream().map(config -> config.columnName) + .collect(Collectors.joining(", ")) + + ") VALUES (" + + columnMapping.values().stream().map(config -> "?") + .collect(Collectors.joining(", ")) + +")"); + result.put(ImplicitSqlType.UPDATE, + "UPDATE ${_dbSchema}.${_table} " + + "SET " + + columnMapping.values().stream().map(config -> config.columnName + " = ?") + .collect(Collectors.joining(", ")) + + " WHERE objectid = ?"); + result.put(ImplicitSqlType.DELETE, + "DELETE FROM ${_dbSchema}.${_table} WHERE objectid = ? AND rev = ?"); + return result; + } + + @Override + protected ResultMapper> createResultMapper(ResultSetMetaData metaData) throws SQLException { + Collection columnNames = extractColumnNames(metaData); + if (columnNames.contains(ResultMappers.TOTAL_COLUMN)) { + return resultMappers.forTotalCount(); + } + var columnConfigs = this.columnMapping.values().stream() + .filter(mapping -> columnNames.contains(mapping.columnName.toLowerCase())) + .collect(Collectors.toList()); + return new MappedResultMapper(objectMapper, columnConfigs); + } + + @Override + public ResourceResponse read(String fullId, String type, String localId, Connection connection) + throws NotFoundException, IOException, SQLException { + List> results = new ArrayList<>(); + + var readSql = implicitSql.get(ImplicitSqlType.READ); + try (var readStatement = connection.prepareStatement(readSql)) { + logger.debug("Populating prepared statement {} for {}", readStatement, fullId); + readStatement.setString(1, localId); + + logger.debug("Executing: {}", readStatement); + try (var resultSet = readStatement.executeQuery()) { + var resultMapper = createResultMapper(resultSet.getMetaData()); + while (resultSet.next()) { + results.add(resultMapper.map(resultSet)); + } + } + } + + if (results.isEmpty()) { + throw new NotFoundException("Object " + fullId + " not found in " + type); + } + var result = results.get(0); + String revision = (String) result.get(Constants.OBJECT_REV); + logger.debug(" full id: {}, rev: {}, obj {}", fullId, revision, result); + + return newResourceResponse(localId, revision, new JsonValue(result)); + } + + @Override + public void create(String fullId, String type, String localId, Map obj, Connection connection) + throws PreconditionFailedException, InternalServerErrorException, IOException, SQLException { + logger.debug("Create with fullid {}", fullId); + + String revision = "0"; + + // update object properties + obj.put(Constants.OBJECT_ID, localId); + obj.put(Constants.OBJECT_REV, revision); + + var createSql = implicitSql.get(ImplicitSqlType.CREATE); + try (var createStatement = connection.prepareStatement(createSql)) { + logger.trace("Populating statement {} with params {}, {}, {}", createStatement, type, localId, revision); + populatePreparedStatement(createStatement, new JsonValue(obj)); + + logger.debug("Executing: {}", createStatement); + createStatement.executeUpdate(); + logger.debug("Created object for id {} with rev {}", fullId, 
revision); + } + } + + /** + * Populate prepared statement for positional parameters in the same order as {@link #columnMapping}. + * + * @param statement the statement to populate + * @param jsonObject the object to extract values from + * @throws BadRequestException when there are unmapped properties + * @throws InternalServerErrorException in case an illegal state is encountered + * @throws JsonProcessingException in case of property mapping error + * @throws SQLException in case of DB failure + */ + private void populatePreparedStatement(PreparedStatement statement, JsonValue jsonObject) + throws BadRequestException, InternalServerErrorException, JsonProcessingException, SQLException { + var unmappedFields = jsonObject.copy(); + + int index = 0; + for (MappedColumnConfig config : columnMapping.values()) { + unmappedFields.remove(config.propertyName); + applyStatementParameter(statement, ++index, config, jsonObject.get(config.propertyName)); + } + + // some tables don't map _id and _rev (e.g., audit) + unmappedFields.remove(Constants.OBJECT_ID); + unmappedFields.remove(Constants.OBJECT_REV); + if (!unmappedFields.asMap().isEmpty()) { + throw new BadRequestException("Unmapped fields " + JsonValueUtils.flatten(unmappedFields) + + " for table " + schemaName + "." + tableName); + } + } + + /** + * Set prepared statement parameter according to the provided column configuration. + * + * @param statement the statement for which the parameter should be set + * @param index the parameter index + * @param config the column configuration + * @param value the parameter value to set + * @throws InternalServerErrorException in case an illegal state is encountered + * @throws JsonProcessingException in case of property mapping error + * @throws SQLException in case of DB failure + */ + protected void applyStatementParameter(PreparedStatement statement, int index, MappedColumnConfig config, + JsonValue value) throws InternalServerErrorException, JsonProcessingException, SQLException { + Object rawValue = value != null ? value.getObject() : null; + switch (config.valueType) { + case STRING: + if (rawValue != null && !(rawValue instanceof String)) { + if (logger.isTraceEnabled()) { + logger.trace("Value for {} is getting stringified from type {} to store in a STRING " + + "column as value: {}", config.propertyName, rawValue.getClass(), rawValue); + } + rawValue = objectMapper.writeValueAsString(rawValue); + } + statement.setString(index, (String) rawValue); + break; + case NUMBER: + if (rawValue instanceof Integer) { + statement.setInt(index, (Integer) rawValue); + } else if (rawValue instanceof Long) { + statement.setLong(index, (Long) rawValue); + } else if (rawValue instanceof Float) { + statement.setFloat(index, (Float) rawValue); + } else if (rawValue instanceof Double) { + statement.setDouble(index, (Double) rawValue); + } else if (rawValue == null) { + statement.setNull(index, Types.INTEGER); + } else { + throw new InternalServerErrorException("Invalid value type " + rawValue.getClass()); + } + break; + case BOOLEAN: + if (rawValue instanceof Boolean) { + statement.setObject(index, ((Boolean) rawValue).booleanValue() ? 1 : 0); + } else if (rawValue == null) { + statement.setNull(index, Types.BIT); + } else { + throw new InternalServerErrorException("Invalid value type " + rawValue.getClass()); + } + break; + + case JSON_LIST: + statement.setString(index, value != null + ? 
objectMapper.writeValueAsString(value.asList()) : null); + break; + case JSON_MAP: + statement.setString(index, value != null + ? objectMapper.writeValueAsString(value.asMap()) : null); + break; + default: + throw new InternalServerErrorException("Unsupported DB column type " + config.valueType); + } + } + + /** + * Read an object with FOR UPDATE lock applied. + * + * @param fullId qualified id of component type and id + * @param type the qualifier of the object to retrieve + * @param localId the identifier without the qualifier of the object to retrieve + * @param connection database connection to use + * @return the row as a map of column name/value pairs for the requested object + * @throws NotFoundException if the requested object was not found in the DB + * @throws java.sql.SQLException for general DB issues + */ + protected Map readForUpdate(String fullId, String type, String localId, Connection connection) + throws NotFoundException, SQLException { + var readSql = implicitSql.get(ImplicitSqlType.READFORUPDATE); + try (var readStatement = connection.prepareStatement(readSql)) { + logger.trace("Populating prepared statement {} for {}", readStatement, fullId); + readStatement.setString(1, localId); + + logger.debug("Executing: {}", readStatement); + try (var resultSet = readStatement.executeQuery()) { + if (!resultSet.next()) { + throw new NotFoundException("Object " + fullId + " not found in " + type); + } + try { + return resultMappers.forObjectRef(true).map(resultSet); + } catch (IOException e) { + throw new IllegalStateException("Unexpected error", e); + } + } + } + } + + @Override + public void update(String fullId, String type, String localId, String rev, Map obj, + Connection connection) throws NotFoundException, PreconditionFailedException, BadRequestException, + InternalServerErrorException, IOException, SQLException { + logger.debug("Update with fullid {}", fullId); + + // read existing object state + JsonValue existingObj = new JsonValue(readForUpdate(fullId, type, localId, connection)); + String existingRev = existingObj.get(Constants.OBJECT_REV).asString(); + logger.debug("Update existing object {} rev: {}", fullId, existingRev); + + // perform optimistic version locking + if (!existingRev.equals(rev)) { + throw new PreconditionFailedException("Update rejected as current Object revision " + existingRev + + " is different than expected by caller (" + rev + "), the object has changed since retrieval."); + } + + // support changing object identifier + String updatedId = (String) obj.get(Constants.OBJECT_ID); + if (updatedId != null && !updatedId.equals(localId)) { + logger.debug("Object identifier is changing from " + localId + " to " + updatedId); + } else { + updatedId = localId; // if it hasn't changed, use the existing ID + obj.put(Constants.OBJECT_ID, updatedId); // ensure the ID is saved in the object + } + + // handle revision update + String updatedRev = Integer.toString(Integer.parseInt(rev) + 1); + obj.put(Constants.OBJECT_REV, updatedRev); + + var updateSql = implicitSql.get(ImplicitSqlType.UPDATE); + try (var updateStatement = connection.prepareStatement(updateSql)) { + logger.trace("Populating prepared statement {} with {} {} {}", updateStatement, fullId, updatedId, + updatedRev); + populatePreparedStatement(updateStatement, new JsonValue(obj)); + updateStatement.setString(columnMapping.size() + 1, localId); + logger.debug("Update statement: {}", updateStatement); + + int updateCount = updateStatement.executeUpdate(); + logger.trace("Updated rows: {} for {}", 
updateCount, fullId); + if (updateCount != 1) { + throw new InternalServerErrorException("Update execution did not result in updating 1 " + + "row as expected. Updated rows: " + updateCount); + } + } + } + + @Override + public void delete(String fullId, String type, String localId, String rev, Connection connection) + throws SQLException, ResourceException { + logger.debug("Delete with fullid {}", fullId); + + // read existing object state + JsonValue existingObj = new JsonValue(readForUpdate(fullId, type, localId, connection)); + String existingRev = existingObj.get(Constants.OBJECT_REV).asString(); + + // perform optimistic version locking + if (!"*".equals(existingRev) && !existingRev.equals(rev)) { + throw new PreconditionFailedException("Delete rejected as current Object revision " + existingRev + + " is different than the expected by caller " + rev + ", the object has changed since retrieval."); + } + + var deleteSql = implicitSql.get(ImplicitSqlType.DELETE); + try (var deleteStatement = connection.prepareStatement(deleteSql)) { + logger.trace("Populating prepared statement {} for {} {} {} {}", deleteStatement, fullId, type, localId, rev); + deleteStatement.setString(1, localId); + deleteStatement.setString(2, rev); + logger.debug("Delete statement: {}", deleteStatement); + + int deletedRows = deleteStatement.executeUpdate(); + logger.trace("Deleted {} rows for id : {} {}", deletedRows, localId); + if (deletedRows < 1) { + throw new InternalServerErrorException("Deleting object for " + fullId + " failed, DB reported " + deletedRows + " rows deleted"); + } else { + logger.debug("Delete for id succeeded: {} revision: {}", localId, rev); + } + } + } + + @Override + public List> query(String type, Map params, Connection connection) + throws SQLException, ResourceException { + return queryHandler.query(type, params, connection); + } + + @Override + protected SQLBuilder resolveQueryFilter(QueryFilter queryFilter, List sortKeys, + Map sqlParams) { + var builder = createSqlBuilder(); + + var configResolver = createConfigResolver(); + + var visitor = createFilterVisitor(configResolver); + builder.addColumn("obj.*") + .from("${_dbSchema}.${_table}", "obj") + .where(Clauses.where(queryFilter.accept(visitor, new NamedParameterCollector(sqlParams)).toSQL())); + + if (sortKeys != null) { + for (SortKey sortKey : sortKeys) { + var config = configResolver.resolve(sortKey.getField()); + builder.orderBy(config.columnName, sortKey.isAscendingOrder()); + } + } + + return builder; + } + + /** + * Create new column configuration resolver. + * + * @return new configuration resolver instance + */ + protected MappedConfigResolver createConfigResolver() { + Map columnConfig = columnMapping.values().stream() + .collect(Collectors.toMap(value -> value.propertyName, value -> value)); + return field -> { + var config = columnConfig.get(field); + if (config == null) { + throw new IllegalArgumentException("Unknown object field: " + field.toString()); + } + return config; + }; + } + + /** + * Create new {@link SQLBuilder} to render query filter queries. 
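+ *
+ * <p>
+ * The default builder appends {@code LIMIT}/{@code OFFSET} paging clauses; database-specific subclasses
+ * can override this method when a different paging syntax is required.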
+ * + * @return new SQLBuilder instance + */ + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + return "SELECT " + getColumns().toSQL() + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL() + + getOrderByClause().toSQL() + + " LIMIT ${int:_pageSize} " + + " OFFSET ${int:_pagedResultsOffset}"; + } + }; + } + + /** + * Create new {@link MappedSQLQueryFilterVisitor} to render query filter queries. + * + * @param configResolver column configuration resolver + * @return new MappedSQLQueryFilterVisitor instance + */ + protected MappedSQLQueryFilterVisitor createFilterVisitor(MappedConfigResolver configResolver) { + return new MappedSQLQueryFilterVisitor(configResolver, objectMapper); + } + + @Override + public Integer queryCount(String type, Map params, Connection connection) + throws SQLException, ResourceException { + return queryHandler.queryCount(type, params, connection); + } + + @Override + public Integer command(String type, Map params, Connection connection) + throws SQLException, ResourceException { + return queryHandler.command(type, params, connection); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ColumnResultMapper.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ColumnResultMapper.java new file mode 100644 index 0000000000..3422d93481 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ColumnResultMapper.java @@ -0,0 +1,51 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.mapper; + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * Column-based result mapper that maps raw column values to object properties. 
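+ *
+ * <p>
+ * Column names are lower-cased when used as map keys, so a row with columns {@code ID} and {@code OBJECTID}
+ * is mapped to the keys {@code "id"} and {@code "objectid"}, with values taken as reported by the JDBC driver.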
+ */ +public class ColumnResultMapper implements ResultMapper> { + + private final int columnCount; + + private final List columnNames = new ArrayList<>(); + + public ColumnResultMapper(ResultSetMetaData metaData) throws SQLException { + columnCount = metaData.getColumnCount(); + for (int idx = 1; idx <= columnCount; idx++) { + columnNames.add(metaData.getColumnName(idx)); + } + } + + @Override + public Map map(ResultSet rs) throws SQLException { + Map result = new LinkedHashMap(); + for (int idx = 1; idx <= columnCount; idx++) { + result.put(columnNames.get(idx - 1).toLowerCase(), rs.getObject(idx)); + } + return result; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/MappedResultMapper.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/MappedResultMapper.java new file mode 100644 index 0000000000..86ff614341 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/MappedResultMapper.java @@ -0,0 +1,114 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.mapper; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedColumnConfig; +import org.forgerock.openidm.util.JsonUtil; + +/** + * Mapped table result handler that maps columns based on provided column mapping configuration. + */ +public class MappedResultMapper implements ResultMapper> { + + private final ObjectMapper objectMapper; + + private final Collection columnConfigs; + + /** + * Create new result mapper using the given object mapper and column mapping that will be used for + * result set extraction. + * + *

+ * Column mapping configuration should be contain only columns that are actually present in the + * processed result set. + * + * @param objectMapper the object mapper instance + * @param columnConfigs the column mapping to use to extract data from the result set + */ + public MappedResultMapper(ObjectMapper objectMapper, Collection columnConfigs) { + this.objectMapper = objectMapper; + this.columnConfigs = columnConfigs; + } + + @Override + public Map map(ResultSet resultSet) throws SQLException, IOException { + JsonValue result = new JsonValue(new LinkedHashMap()); + for (MappedColumnConfig config : columnConfigs) { + Object value = null; + switch (config.valueType) { + case STRING: + value = resultSet.getString(config.columnName); + if (JsonUtil.isEncrypted((String) value)) { + value = convertToJson(config.columnName, "encrypted", (String) value, Map.class).asMap(); + } + break; + case NUMBER: + // convert to narrow down data type to Integer/Long/Double (Float is not used by Jackson) + value = resultSet.getString(config.columnName); + if (value != null) { + value = convertToJson(config.columnName, config.valueType.name(), (String) value, Number.class) + .getObject(); + } + break; + case BOOLEAN: + value = resultSet.getObject(config.columnName); + if (value instanceof Number) { + value = ((Number) value).intValue() == 1; + } else if (value instanceof Boolean) { + value = ((Boolean) value).booleanValue(); + } else if (value != null) { + throw new InternalServerErrorException("Unsupported boolean value class " + value.getClass().getName()); + } + break; + case JSON_LIST: + value = convertToJson(config.columnName, config.valueType.name(), + resultSet.getString(config.columnName), List.class).asList(); + break; + case JSON_MAP: + value = convertToJson(config.columnName, config.valueType.name(), + resultSet.getString(config.columnName), Map.class).asMap(); + break; + default: + throw new InternalServerErrorException("Unsupported DB column type " + config.valueType); + } + result.putPermissive(config.propertyName, value); + } + return result.asMap(); + } + + private JsonValue convertToJson(String name, String nameType, String value, Class valueType) + throws InternalServerErrorException { + if (value != null) { + try { + return new JsonValue(objectMapper.readValue(value, valueType)); + } catch (IOException e) { + throw new InternalServerErrorException("Unable to map " + nameType + " value for " + name, e); + } + } + return new JsonValue(null); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ResultMapper.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ResultMapper.java new file mode 100644 index 0000000000..bd3a10f625 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ResultMapper.java @@ -0,0 +1,45 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. 
If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.mapper; + +import java.io.IOException; +import java.sql.ResultSet; +import java.sql.SQLException; + +/** + * Database query result set mapper. + * + *

+ * The responsibility of this component is to map a single object from the provided + * {@link ResultSet}. Mapper implementations should not move the cursor of the result + * set, i.e. iterating through the result set is responsibility of the caller. + * + * @param mapped object type + */ +@FunctionalInterface +public interface ResultMapper { + + /** + * Map single object from the result set. + * + * @param resultSet the result set containing single row + * @return mapped object + * @throws IOException in case of JSON mapping error + * @throws SQLException in case of SQL error + */ + T map(ResultSet resultSet) throws SQLException, IOException; + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ResultMappers.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ResultMappers.java new file mode 100644 index 0000000000..e505acedb8 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/mapper/ResultMappers.java @@ -0,0 +1,84 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.mapper; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.sql.SQLException; +import java.util.Map; + +/** + * This class provides convenience factory methods to instantiate basic well-defined result mappers. + */ +public class ResultMappers { + + /** + * Column holding full object state in JSON format. + */ + public static final String OBJECT_COLUMN = "fullobject"; + + /** + * Column holding the total number of matched rows. + */ + public static final String TOTAL_COLUMN = "total"; + + /** + * Type reference for generic map-based JSON object. + */ + private static final TypeReference> OBJECT_TYPE_REF = new TypeReference<>() {}; + + private final ObjectMapper objectMapper; + + /** + * Create new results mapper factory instance. + * + * @param objectMapper object mapper to use when mapping the object state + */ + public ResultMappers(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + /** + * Create mapper for full object state in fullobject column. + * + * @return full object state mapper + */ + public ResultMapper> forFullObject() { + return rs -> objectMapper.readValue(rs.getString(OBJECT_COLUMN), OBJECT_TYPE_REF); + } + + /** + * Create simple mapper for total count. + * + * @return count query result mapper + */ + public ResultMapper> forTotalCount() { + return rs -> Map.of(TOTAL_COLUMN, rs.getInt(TOTAL_COLUMN)); + } + + /** + * Create mapper for simple object reference based on objectid and rev columns. 
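As a side note, a hedged sketch of how these factory methods can be combined with a result set loop; the objectMapper and resultSet variables are assumptions for illustration:

    ResultMappers mappers = new ResultMappers(objectMapper);
    ResultMapper<Map<String, Object>> fullObject = mappers.forFullObject();    // reads the "fullobject" JSON column
    ResultMapper<Map<String, Object>> reference = mappers.forObjectRef(true);  // reads "objectid" and "rev"
    while (resultSet.next()) {
        Map<String, Object> object = fullObject.map(resultSet);
    }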
+ * + * @param revision whether the result set contains object revision + * @return object reference result mapper + */ + public ResultMapper> forObjectRef(boolean revision) throws SQLException { + return revision + ? rs -> Map.of("_id", rs.getString("objectid"), "_rev", rs.getString("rev")) + : rs -> Map.of("_id", rs.getString("objectid")); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/GenericSQLQueryFilterVisitor.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/GenericSQLQueryFilterVisitor.java new file mode 100644 index 0000000000..ccf2a92bbf --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/GenericSQLQueryFilterVisitor.java @@ -0,0 +1,215 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2015 ForgeRock AS. All rights reserved. + * + * The contents of this file are subject to the terms + * of the Common Development and Distribution License + * (the License). You may not use this file except in + * compliance with the License. + * + * You can obtain a copy of the License at + * http://forgerock.org/license/CDDLv1.0.html + * See the License for the specific language governing + * permission and limitations under the License. + * + * When distributing Covered Code, include this CDDL + * Header Notice in each file and include the License file + * at http://forgerock.org/license/CDDLv1.0.html + * If applicable, add the following below the CDDL Header, + * with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.query; + +import static org.forgerock.openidm.repo.util.Clauses.and; +import static org.forgerock.openidm.repo.util.Clauses.not; +import static org.forgerock.openidm.repo.util.Clauses.or; +import static org.forgerock.openidm.repo.util.Clauses.where; + +import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; +import org.forgerock.json.JsonPointer; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterCollector; +import org.forgerock.openidm.repo.util.AbstractSQLQueryFilterVisitor; +import org.forgerock.openidm.repo.util.Clause; +import org.forgerock.openidm.util.ResourceUtil; +import org.forgerock.util.query.QueryFilter; +import org.forgerock.util.query.QueryFilterVisitor; + +/** + * {@link QueryFilterVisitor} for generating WHERE clause clause for generic table schema. + * + *

+ * Filter visitor does not support contains filters for collection members. Only simple string + * based contains is supported. + */ +// TODO support collection based assertions +public class GenericSQLQueryFilterVisitor extends AbstractSQLQueryFilterVisitor { + + private final int searchableLength; + + private final SQLBuilder builder; + + /** + * Construct a QueryFilterVisitor to produce SQL for managed objects using the generic table structure. + * + * @param searchableLength the searchable length; properties longer than this will be trimmed to this length + * @param builder the {@link SQLBuilder} to use to keep track of the select columns, table joins, and order by lists + */ + public GenericSQLQueryFilterVisitor(final int searchableLength, SQLBuilder builder) { + this.searchableLength = searchableLength; + this.builder = builder; + } + + private boolean isNumeric(final Object valueAssertion) { + return valueAssertion instanceof Integer + || valueAssertion instanceof Long + || valueAssertion instanceof Float + || valueAssertion instanceof Double; + } + + private boolean isBoolean(final Object valueAssertion) { + return valueAssertion instanceof Boolean; + } + + private Object trimValue(final Object value) { + if (isNumeric(value) || isBoolean(value)) { + return value; + } else { + return StringUtils.left(value.toString(), searchableLength); + } + } + + /** + * Generate the WHERE clause for properties table for a numeric value assertion. + * + * @param joinAlias the property table alias + * @param operand the comparison operand + * @param valueParam the value placeholder + * @return SQL WHERE clause for properties table + */ + protected Clause buildNumericValueClause(String propTable, String operand, String valueParam) { + // XXX Should we distinguish between decimal and integer values? Jackson makes that distinction (7 vs 7.0). + return where(propTable + ".proptype = 'java.lang.Integer'") + .or(propTable + ".proptype = 'java.lang.Long'") + // we can skip java.lang.Float as Jackson is not using it by default + .or(propTable + ".proptype = 'java.lang.Double'") + // CAST to DECIMAL as that is the most generic thing to do + .and("CAST(" + propTable + ".propvalue AS DECIMAL) " + operand + " ${" + valueParam + "}"); + } + + /** + * Generate the WHERE clause for properties table for a boolean value assertion. + * + * @param joinAlias the property table alias + * @param operand the comparison operand + * @param valueParam the value placeholder + * @return SQL WHERE clause for properties table + */ + protected Clause buildBooleanValueClause(String propTable, String operand, String valueParam) { + return where(propTable + ".proptype = 'java.lang.Boolean'") + .and(where(propTable + ".propvalue " + operand + " ${" + valueParam + "}")); + } + + /** + * Generate the WHERE clause for properties table for a string value assertion. 
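To illustrate the visitor contract, a hedged sketch of rendering a filter against the generic schema; construction of the SQLBuilder subclass (the builder variable) is assumed and elided:

    Map<String, Object> params = new HashMap<>();
    NamedParameterCollector collector = new NamedParameterCollector(params);
    GenericSQLQueryFilterVisitor visitor = new GenericSQLQueryFilterVisitor(2000, builder);
    QueryFilter<JsonPointer> filter = QueryFilter.and(
            QueryFilter.greaterThan(new JsonPointer("age"), 21),
            QueryFilter.equalTo(new JsonPointer("enabled"), true));
    Clause whereClause = filter.accept(visitor, collector);
    // whereClause.toSQL() now references generated property-table aliases (p1, p2) with
    // ${k1}/${v1}-style placeholders, and 'params' carries the matching values.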
+ * + * @param joinAlias the property table alias + * @param operand the comparison operand + * @param valueParam the value placeholder + * @return SQL WHERE clause for properties table + */ + protected Clause buildStringValueClause(String joinAlias, String operand, String valueParam) { + return where(joinAlias + ".propvalue " + operand + " ${" + valueParam + "}"); + } + + @Override + public Clause visitValueAssertion(NamedParameterCollector collector, String operand, JsonPointer field, Object valueAssertion) { + var valueParam = collector.register("v", convertValueAssertion(valueAssertion)); + if (ResourceUtil.RESOURCE_FIELD_CONTENT_ID_POINTER.equals(field)) { + return where("obj.objectid " + operand + " ${" + valueParam + "}"); + } else { + String propParam = collector.register("k", field.toString()); + String joinAlias = collector.generate("p"); + final Clause valueClause; + if (isNumeric(valueAssertion)) { + valueClause = buildNumericValueClause(joinAlias, operand, valueParam); + } else if (isBoolean(valueAssertion)) { + valueClause = buildBooleanValueClause(joinAlias, operand, valueParam); + } else { + valueClause = buildStringValueClause(joinAlias, operand, valueParam); + } + builder.leftJoin("${_dbSchema}.${_propTable}", joinAlias) + .on(where(joinAlias + ".${_mainTable}_id = obj.id") + .and(where(joinAlias + ".propkey = ${" + propParam + "}"))); + return valueClause; + } + } + + /** + * Convert value assertion to SQL parameter type. + * + * @param valueAssertion value assertion to convert + * @return converted assertion + */ + protected Object convertValueAssertion(Object valueAssertion) { + if (valueAssertion instanceof Boolean) { + return ((Boolean) valueAssertion).booleanValue() ? "true" : "false"; + } + return valueAssertion; + } + + @Override + public Clause visitAndFilter(NamedParameterCollector collector, List> subfilters) { + return and(subfilters.stream().map(filter -> filter.accept(this, collector)).collect(Collectors.toList())); + } + + @Override + public Clause visitOrFilter(NamedParameterCollector collector, List> subfilters) { + return or(subfilters.stream().map(filter -> filter.accept(this, collector)).collect(Collectors.toList())); + } + + @Override + public Clause visitPresentFilter(NamedParameterCollector collector, JsonPointer field) { + if (ResourceUtil.RESOURCE_FIELD_CONTENT_ID_POINTER.equals(field)) { + return where("(obj.objectid IS NOT NULL)"); // always TRUE -> NOT NULL is enforced by the schema + } else { + var propParam = collector.register("k", field.toString()); + var joinAlias = collector.generate("p"); + builder.leftJoin("${_dbSchema}.${_propTable}", joinAlias) + .on(where(joinAlias + ".${_mainTable}_id = obj.id") + .and(joinAlias + ".propkey = ${" + propParam + "}")); + return where(joinAlias + ".propvalue IS NOT NULL"); + } + } + + @Override + public Clause visitBooleanLiteralFilter(NamedParameterCollector collector, boolean value) { + return where(value ? 
"1 = 1" : "1 <> 1"); + } + + @Override + public Clause visitNotFilter(NamedParameterCollector collector, QueryFilter subFilter) { + return not(subFilter.accept(this, collector)); + } + + @Override + public Clause visitContainsFilter(NamedParameterCollector collector, JsonPointer field, Object valueAssertion) { + return super.visitContainsFilter(collector, field, trimValue(valueAssertion)); + } + + @Override + public Clause visitEqualsFilter(NamedParameterCollector collector, JsonPointer field, Object valueAssertion) { + return super.visitEqualsFilter(collector, field, trimValue(valueAssertion)); + } + + @Override + public Clause visitStartsWithFilter(NamedParameterCollector collector, JsonPointer field, Object valueAssertion) { + return super.visitStartsWithFilter(collector, field, trimValue(valueAssertion)); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/MappedSQLQueryFilterVisitor.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/MappedSQLQueryFilterVisitor.java new file mode 100644 index 0000000000..a478f68fa6 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/MappedSQLQueryFilterVisitor.java @@ -0,0 +1,81 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.query; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.forgerock.json.JsonPointer; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedColumnConfig; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedColumnConfig.ValueType; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedConfigResolver; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterCollector; +import org.forgerock.openidm.repo.util.StringSQLQueryFilterVisitor; +import org.forgerock.openidm.repo.util.StringSQLRenderer; +import org.forgerock.util.query.QueryFilterVisitor; + +/** + * {@link QueryFilterVisitor} for generating WHERE clause for mapped table schema. + */ +public class MappedSQLQueryFilterVisitor extends StringSQLQueryFilterVisitor { + + protected final MappedConfigResolver configResolver; + + protected final ObjectMapper objectMapper; + + public MappedSQLQueryFilterVisitor(MappedConfigResolver configResolver, ObjectMapper objectMapper) { + this.configResolver = configResolver; + this.objectMapper = objectMapper; + } + + @Override + public StringSQLRenderer visitValueAssertion(NamedParameterCollector collector, String operand, JsonPointer field, + Object valueAssertion) { + MappedColumnConfig config = configResolver.resolve(field); + + // convert column value to DECIMAL to ensure correct operator behavior + if (isNumeric(valueAssertion) && config.valueType == ValueType.NUMBER) { + String paramName = collector.register("v", valueAssertion); + return new StringSQLRenderer( + "CAST(" + config.columnName + " AS DECIMAL)" + + " " + operand + " " + + "CAST(${" + paramName + "} AS DECIMAL)"); + } + + // convert column value to BIT to ensure database vendor support + if (valueAssertion instanceof Boolean && config.valueType == ValueType.BOOLEAN) { + String paramName = collector.register("v", ((Boolean) valueAssertion).booleanValue() ? 1 : 0); + return new StringSQLRenderer( + "CAST(" + config.columnName + " AS BIT)" + + " " + operand + " " + + "CAST(${" + paramName + "} AS BIT)"); + } + + String paramValue; + try { + paramValue = valueAssertion instanceof String + ? 
(String) valueAssertion + : objectMapper.writeValueAsString(valueAssertion); + } catch (JsonProcessingException e) { + throw new IllegalStateException("Unexpected JSON conversion error", e); + } + + String paramName = collector.register("v", paramValue); + return new StringSQLRenderer( + config.columnName + + " " + operand + " " + + "${" + paramName + "}"); + } + + + @Override + public StringSQLRenderer visitPresentFilter(NamedParameterCollector collector, JsonPointer field) { + MappedColumnConfig config = configResolver.resolve(field); + return new StringSQLRenderer(config.columnName + " IS NOT NULL"); + } + + protected boolean isNumeric(final Object valueAssertion) { + return valueAssertion instanceof Integer + || valueAssertion instanceof Long + || valueAssertion instanceof Float + || valueAssertion instanceof Double; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/QueryFilterResolver.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/QueryFilterResolver.java new file mode 100644 index 0000000000..3cf25c8f6e --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/QueryFilterResolver.java @@ -0,0 +1,27 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.query; + +import java.util.List; +import java.util.Map; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.util.query.QueryFilter; + +/** + * Functional interface for transforming query filters to SQL queries. + */ +@FunctionalInterface +public interface QueryFilterResolver { + + /** + * Render query filter as SQL query. + * + * @param queryFilter the query filter to render + * @param sortKeys sort keys + * @param sqlParams query parameters to be used as named parameters + * @return SQL builder instance that is able to produce the final SQL string + */ + SQLBuilder resolveQueryFilter(QueryFilter queryFilter, List sortKeys, + Map sqlParams); + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/ResultMapperFactory.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/ResultMapperFactory.java new file mode 100644 index 0000000000..8eed58358d --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/ResultMapperFactory.java @@ -0,0 +1,24 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.query; + +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import org.forgerock.openidm.repo.jdbc.impl.refactor.mapper.ResultMapper; + +/** + * Functional interface for creating {@link ResultMapper}s based on result set meta data. + * + * @param result object type + */ +@FunctionalInterface +public interface ResultMapperFactory { + + /** + * Create new result mapper based on the provided result set meta data. 
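For illustration, the factory is usually satisfied by a method reference or a lambda; a hedged sketch (objectMapper is an assumption):

    // Column-per-key mapping, e.g. for explicit query expressions:
    ResultMapperFactory<Map<String, Object>> columnFactory = ColumnResultMapper::new;

    // Fixed mapper that ignores the meta data, e.g. for full-object queries:
    ResultMapperFactory<Map<String, Object>> fullObjectFactory =
            metaData -> new ResultMappers(objectMapper).forFullObject();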
+ * + * @param metaData result set meta data + * @return result mapper instance + * @throws SQLException in case of DB failure + */ + ResultMapper createResultMapper(ResultSetMetaData metaData) throws SQLException; + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/TableQueryHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/TableQueryHandler.java new file mode 100644 index 0000000000..b9b36b307d --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/query/TableQueryHandler.java @@ -0,0 +1,292 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.query; + +import static org.forgerock.openidm.repo.QueryConstants.PAGED_RESULTS_OFFSET; +import static org.forgerock.openidm.repo.QueryConstants.PAGE_SIZE; +import static org.forgerock.openidm.repo.QueryConstants.QUERY_EXPRESSION; +import static org.forgerock.openidm.repo.QueryConstants.QUERY_FILTER; +import static org.forgerock.openidm.repo.QueryConstants.QUERY_ID; +import static org.forgerock.openidm.repo.QueryConstants.RESOURCE_NAME; +import static org.forgerock.openidm.repo.QueryConstants.SORT_KEYS; +import static org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSupport.applyStatementParams; +import static org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSupport.collectSqlParams; +import static org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSupport.prepareSqlString; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.resource.BadRequestException; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.json.resource.ResourceException; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSql; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.PreparedSql; +import org.forgerock.openidm.smartevent.EventEntry; +import org.forgerock.openidm.smartevent.Name; +import org.forgerock.openidm.smartevent.Publisher; +import org.forgerock.util.query.QueryFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +/** + * Table query and command handling logic. + * + *

+ * This class simply serves as a place to offload query and command handling logic from {@link TableHandler}s + * to keep them more simple and to share logic between mapped and generic implementations. + * + * @param result object type + */ +public class TableQueryHandler { + + /** + * Monitoring event name prefix. + */ + private static final String EVENT_RAW_QUERY_PREFIX = "openidm/internal/repo/jdbc/raw/query/"; + + private final Logger logger = LoggerFactory.getLogger(getClass()); + + private final Map replacementTokens; + + private final Map queryConfig; + + private final Map commandConfig; + + private final QueryFilterResolver filterResolver; + + private final ResultMapperFactory mapperFactory; + + public TableQueryHandler( + Map replacementTokens, + Map queryConfig, + Map commandConfig, + QueryFilterResolver filterResolver, + ResultMapperFactory mapperFactory) { + this.replacementTokens = replacementTokens; + this.queryConfig = queryConfig; + this.commandConfig = commandConfig; + this.filterResolver = filterResolver; + this.mapperFactory = mapperFactory; + } + + /** + * @see TableHandler#queryCount(String, Map, Connection) + */ + public List query(String type, Map params, Connection connection) + throws SQLException, ResourceException { + // create named parameters map that will be used to populate prepared statement + Map sqlParams = new HashMap<>(); + sqlParams.put(RESOURCE_NAME, type); + + // determine paging parameters + int pageSize = (Integer) params.get(PAGE_SIZE); + if (pageSize <= 0) { + sqlParams.put(PAGE_SIZE, Integer.MAX_VALUE); + sqlParams.put(PAGED_RESULTS_OFFSET, 0); + } else { + sqlParams.put(PAGE_SIZE, pageSize); + sqlParams.put(PAGED_RESULTS_OFFSET, params.get(PAGED_RESULTS_OFFSET)); + } + + // resolve query SQL + NamedParameterSql querySql; + if (params.containsKey(QUERY_FILTER)) { + @SuppressWarnings("unchecked") + QueryFilter queryFilter = (QueryFilter) params.get(QUERY_FILTER); + querySql = NamedParameterSql.parse( + filterResolver.resolveQueryFilter(queryFilter, resolveSortKeys(params), sqlParams).toSQL(), + replacementTokens); + } else if (params.containsKey(QUERY_ID)) { + String queryId = (String) params.get(QUERY_ID); + querySql = queryConfig.get(queryId); + if (querySql == null) { + throw new BadRequestException("The passed query identifier " + queryId + + " does not match any configured queries on the JDBC repository service."); + } + collectSqlParams(params, querySql, sqlParams); + } else if (params.containsKey(QUERY_EXPRESSION)) { + querySql = NamedParameterSql.parse((String) params.get(QUERY_EXPRESSION)); + collectSqlParams(params, querySql , sqlParams); + } else { + throw new BadRequestException("Either " + QUERY_ID + ", " + QUERY_EXPRESSION + ", or " + + QUERY_FILTER + " to identify/define a query must be passed in the parameters. 
" + + params); + } + + PreparedSql preparedSql = prepareSqlString(querySql, sqlParams); + + List result = new ArrayList<>(); + EventEntry measure = startQueryMeasure(params, querySql.getSqlString()); + try (var queryStatement = connection.prepareStatement(preparedSql.getSqlString())) { + applyStatementParams(queryStatement, preparedSql.getParameters()); + try (var resultSet = queryStatement.executeQuery()) { + var resultMapper = mapperFactory.createResultMapper(resultSet.getMetaData()); + while (resultSet.next()) { + result.add(resultMapper.map(resultSet)); + } + } + measure.setResult(result); + } catch (IOException ex) { + throw new InternalServerErrorException("Failed to convert result objects for query " + + querySql.getSqlString() + " with params: " + params + " message: " + + ex.getMessage(), ex); + } + + return result; + } + + /** + * Resolve sort keys for the query filter request making sure there is always a stable iteration + * order by including sort key for object identifier. + * + * @param params query request parameters + * @return list of sort keys + */ + @SuppressWarnings("unchecked") + private List resolveSortKeys(Map params) { + var sortKeys = (List) params.get(SORT_KEYS); + if (sortKeys == null) { + sortKeys = Collections.EMPTY_LIST; + } + var containsId = sortKeys.stream().anyMatch(sortKey -> { + return Constants.OBJECT_ID.equals(sortKey.getField().toString()); + }); + if (!containsId) { + sortKeys = Stream.concat(sortKeys.stream(), Stream.of(SortKey.ascendingOrder(Constants.OBJECT_ID))) + .collect(Collectors.toList()); + } + return sortKeys; + } + + /** + * @see TableHandler#queryCount(String, Map, Connection) + */ + public Integer queryCount(String type, Map params, Connection connection) + throws SQLException, ResourceException { + // create named parameters map that will be used to populate prepared statement + Map sqlParams = new HashMap<>(); + sqlParams.put(RESOURCE_NAME, type); + + // resolve query SQL + NamedParameterSql countSql = null; + if (params.containsKey(QUERY_ID)) { + countSql = queryConfig.get(params.get(QUERY_ID) + "-count"); + if (countSql == null) { + return null; // no count query defined + } + collectSqlParams(params, countSql, sqlParams); + } else if (params.containsKey(QUERY_FILTER)) { + @SuppressWarnings("unchecked") + QueryFilter queryFilter = (QueryFilter) params.get(QUERY_FILTER); + countSql = NamedParameterSql.parse( + filterResolver.resolveQueryFilter(queryFilter, null, sqlParams).toCountSQL(), + replacementTokens); + } + + if (countSql == null) { + return null; // no count query defined + } + + PreparedSql preparedSql = prepareSqlString(countSql, sqlParams); + + EventEntry measure = startQueryMeasure(Map.of(QUERY_ID, "queryCount"), countSql.getSqlString()); + try (var countStatement = connection.prepareStatement(preparedSql.getSqlString())) { + applyStatementParams(countStatement, preparedSql.getParameters()); + try (var resultSet = countStatement.executeQuery()) { + while (!resultSet.next()) { + return null; // result should not be empty + } + var count = resultSet.getInt(1); // expecting only single column + measure.setResult(count); + return count; + } + } catch (IOException ex) { + throw new InternalServerErrorException("Failed to convert result objects for query " + + countSql.getSqlString() + " with params: " + params + " message: " + + ex.getMessage(), ex); + } finally { + measure.end(); + } + } + + /** + * @see TableHandler#command(String, Map, Connection) + */ + public Integer command(String type, Map params, Connection 
connection) + throws SQLException, ResourceException { + // create named parameters map that will be used to populate prepared statement + Map sqlParams = new HashMap<>(); + sqlParams.put(RESOURCE_NAME, type); + + // resolve command SQL + NamedParameterSql commandSql = null; + if (params.containsKey("commandId")) { + commandSql = commandConfig.get(params.get("commandId")); + } else if (params.containsKey("commandExpression")) { + commandSql = NamedParameterSql.parse((String) params.get("commandExpression")); + } else { + throw new BadRequestException("Either commandId or commandExpression " + + " to identify/define a query must be passed in the parameters. " + params); + } + + if (commandSql == null) { + throw new BadRequestException("The passed command identifier " + params.get("commandId") + + " does not match any configured commands on the JDBC repository service."); + } + + collectSqlParams(params, commandSql, sqlParams); + PreparedSql preparedSql = prepareSqlString(commandSql, sqlParams); + + EventEntry measure = startQueryMeasure(params, commandSql.getSqlString()); + try (var commandStatement = connection.prepareStatement(preparedSql.getSqlString())) { + applyStatementParams(commandStatement, preparedSql.getParameters()); + int result = commandStatement.executeUpdate(); + measure.setResult(result); + return result; + } catch (SQLException ex) { + logger.debug("DB reported failure preparing command: {} with params: {} error code: {} sqlstate: {} " + + "message: {}", commandSql.getSqlString(), params, ex.getErrorCode(), ex.getSQLState(), ex.getMessage(), ex); + throw new InternalServerErrorException("DB reported failure preparing command."); + } finally { + measure.end(); + } + } + + /** + * Start smart event measure for a query defined by the given params and parsed SQL. + * + * @param params query parameters + * @param querySql parsed query SQL + * @return smart event measure + */ + protected final EventEntry startQueryMeasure(Map params, String querySql) { + String queryId = (String) params.get(QUERY_ID); + return Publisher.start(Name.get(queryId != null + ? EVENT_RAW_QUERY_PREFIX + queryId + : EVENT_RAW_QUERY_PREFIX + "_query_expression"), querySql, null); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterCollector.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterCollector.java new file mode 100644 index 0000000000..e3b17d1028 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterCollector.java @@ -0,0 +1,68 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". 
+ * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Simple convenient map wrapper for collecting sequence of named SQL parameters. + */ +public class NamedParameterCollector { + + private final Map counters = new TreeMap(); + + private final Map parameters; + + /** + * Create new collector with the backing parameter map. + * + * @param parameters parameter map that will store collected parameter token-value pairs + */ + public NamedParameterCollector(Map parameters) { + this.parameters = parameters; + } + + /** + * Register parameter value with the given token name prefix. + * + * @param prefix parameter token name prefix + * @param parameterValue parameter value to register + * @return generated token name + */ + public String register(String prefix, Object parameterValue) { + String parameterKey = generate(prefix); + parameters.put(parameterKey, parameterValue); + return parameterKey; + } + + /** + * Generate parameter name with the given token name prefix. + * + * @param prefix parameter token name prefix + * @return generated token name + */ + public String generate(String prefix) { + var counter = counters.get(prefix); + if (counter == null) { + counter = new AtomicInteger(); + counters.put(prefix, counter); + } + return prefix + counter.incrementAndGet(); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSql.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSql.java new file mode 100644 index 0000000000..685058a164 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSql.java @@ -0,0 +1,128 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.forgerock.openidm.repo.util.TokenHandler; + +/** + * Value class for a SQL statement with named parameter placeholders. + * + *
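Before the statement parsing itself, a hedged sketch of the NamedParameterCollector above (field name and value are made up):

    Map<String, Object> params = new HashMap<>();
    NamedParameterCollector collector = new NamedParameterCollector(params);
    String keyToken = collector.register("k", "/age");    // -> "k1", stores "/age"
    String valueToken = collector.register("v", 42);       // -> "v1", stores 42
    String alias = collector.generate("p");                // -> "p1", nothing stored
    String clause = alias + ".propkey = ${" + keyToken + "} AND "
            + "CAST(" + alias + ".propvalue AS DECIMAL) > ${" + valueToken + "}";
    // params now holds {k1="/age", v1=42}, ready for the named-parameter SQL handling below.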

+ * Example SQL statement: + * SELECT * FROM foo WHERE name = ${name} OR value = ${int:value} + * + *

+ * Named parameters (tokens) are either unqualified (string type) or qualified with a type prefix: + * +

    + *
  • ${int:NAME} – integer type parameter + *
  • ${list:NAME} – list-based parameter (expanded to one placeholder per element of the supplied list) +
+ * + *

+ * Parsed SQL statement contains all non-list parameters replaced with ?. List parameter + * replacement is performed before prepared statement creation when the actual number of substituted + * values is known. + */ +public class NamedParameterSql { + + /** + * Integer parameter prefix. + */ + public static final String PREFIX_INT = "int"; + + /** + * List parameter prefix. + */ + public static final String PREFIX_LIST = "list"; + + private final String sqlString; + + private final List paramTokens; + + public NamedParameterSql(String sql) { + TokenHandler tokenHandler = new TokenHandler(); + + this.sqlString = tokenHandler.replaceTokens(sql, "?", PREFIX_LIST); + this.paramTokens = tokenHandler.extractTokens(sql).stream() + .map(NamedParameterToken::parse).collect(Collectors.toList()); + } + + /** + * Get the SQL string with simple tokens replaced by ?. + * + * @return parsed SQL string + */ + public String getSqlString() { + return sqlString; + } + + /** + * Get parsed named SQL parameter tokens. + * + * @return list with SQL parameter tokens + */ + public List getParamTokens() { + return paramTokens; + } + + @Override + public String toString() { + // for logging purposes + return sqlString; + } + + /** + * Parse the given SQL statement into a {@link NamedParameterSql}. + * + *
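A hedged sketch of the parsing behaviour described above; schema and table names are placeholders:

    NamedParameterSql parsed = NamedParameterSql.parse(
            "SELECT obj.fullobject FROM ${_dbSchema}.${_mainTable} obj"
                    + " WHERE obj.objectid = ${id} AND obj.rev = ${int:rev}",
            Map.of("_dbSchema", "openidm", "_mainTable", "managedobjects"));
    // parsed.getSqlString()   -> "SELECT obj.fullobject FROM openidm.managedobjects obj
    //                             WHERE obj.objectid = ? AND obj.rev = ?"
    // parsed.getParamTokens() -> "id" (plain string parameter) and "int:rev" (integer parameter)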

+ * All parameters ${param} are interpreted as named query parameters that + * resolve to prepared statement parameters. Parameters representing database identifiers + * MUST be resolved before calling this method. + * + * @param sql the SQL statement to parse + * @return parsed SQL statement instance + */ + public static NamedParameterSql parse(String sql) { + return parse(sql, null); + } + + /** + * Parse the given SQL statement into a {@link NamedParameterSql} after replacing the + * specified static tokens. + * + *

+ * CAUTION: Replaced tokens are meant to be database identifier that does not + * need any escaping. + * + * @param sql the SQL statement to parse + * @param replacements replacement tokens (database identifiers) + * @return parsed SQL statement instance + */ + public static NamedParameterSql parse(String sql, Map replacements) { + TokenHandler tokenHandler = new TokenHandler(); + + if (replacements != null) { + sql = tokenHandler.replaceSomeTokens(sql, replacements); + } + + return new NamedParameterSql(sql); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSupport.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSupport.java new file mode 100644 index 0000000000..ccee1bd31c --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSupport.java @@ -0,0 +1,182 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.stream.Collectors; +import org.forgerock.json.resource.BadRequestException; +import org.forgerock.json.resource.InternalServerErrorException; +import org.forgerock.openidm.repo.util.TokenHandler; + +/** + * Convenient utility methods for handling {@link NamedParameterSql} statements. + */ +public class NamedParameterSupport { + + /** + * Collect named SQL parameters from the provided request parameter map. + * + *

+ * If necessary, type conversion is performed (only string values are considered for type conversion). + * Parameters are collected under their fully qualified token name as they might be used multiple times + * with different type hints in the query. + * + * @param params request parameters + * @param parsedSql parsed SQL string + * @param sqlParams result SQL parameter collection + */ + public static void collectSqlParams(Map params, NamedParameterSql parsedSql, Map sqlParams) { + for (NamedParameterToken paramType : parsedSql.getParamTokens()) { + // no need to prepare the same parameter multiple times + if (sqlParams.containsKey(paramType.getToken())) { + continue; + } + + Object paramValue = params.get(paramType.getName()); + + // convert string based values if necessary + if (paramValue instanceof String) { + String stringValue = (String) paramValue; + if (paramType.isList()) { + paramValue = List.of(stringValue.split(",")).stream() + .map(value -> convertSqlParam(value, paramType.getJavaType())) + .collect(Collectors.toList()); + } else { + paramValue = convertSqlParam((String) paramValue, paramType.getJavaType()); + } + + } + + sqlParams.put(paramType.getToken(), paramValue); + } + } + + /** + * Convert request parameter value to the required java type. + * + * @param value request parameter value + * @param type target parameter java type + * @return converted parameter value + */ + private static Object convertSqlParam(String value, Class type) { + if (type == Integer.class) { + return Integer.valueOf(value); + } else if (type == null) { + return value; // leave the type as is + } else { + throw new IllegalStateException("Unknown parameter type " + type); + } + } + + /** + * Prepare SQL string by substituting named parameters with a list of a positional parameters. Null parameter + * values are not supported by this method. + * + * @param parsedSql parsed SQL string with named parameters + * @param sqlParams named SQL parameter map + * @return prepared SQL string with its positional parameters + * @throws BadRequestException + */ + public static PreparedSql prepareSqlString(NamedParameterSql parsedSql, Map sqlParams) + throws BadRequestException { + String sqlString = parsedSql.getSqlString(); + + Map listSizes = new TreeMap(); + + List resultParams = new ArrayList<>(); + for (var paramType : parsedSql.getParamTokens()) { + // try full parameter token name first + Object value = sqlParams.get(paramType.getToken()); + + // search for simple parameter name as fallback + if (value == null) { + value = sqlParams.get(paramType.getName()); + } + + // do not allow null values + if (value == null) { + throw new BadRequestException("Missing entry in params passed to query for token " + + paramType.getToken()); + } + + if (value instanceof List) { + listSizes.put(paramType.getToken(), ((List) value).size()); + ((List) value).forEach(resultParams::add); + } else { + resultParams.add(value); + } + } + + if (!listSizes.isEmpty()) { + sqlString = new TokenHandler().replaceListTokens(sqlString, listSizes, "?"); + } + + return new PreparedSql(sqlString, resultParams); + } + + /** + * Set prepared statement parameters based on the provided value types. 
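Putting the helpers together, a hedged end-to-end sketch; the request parameter value and the open connection are assumptions:

    NamedParameterSql querySql = NamedParameterSql.parse(
            "SELECT fullobject FROM openidm.managedobjects obj WHERE obj.objectid IN (${list:ids})");
    Map<String, Object> requestParams = new HashMap<>();
    requestParams.put("ids", "a,b,c");
    Map<String, Object> sqlParams = new HashMap<>();
    NamedParameterSupport.collectSqlParams(requestParams, querySql, sqlParams);
    PreparedSql prepared = NamedParameterSupport.prepareSqlString(querySql, sqlParams);
    // prepared.getSqlString() should now read "... WHERE obj.objectid IN (?, ?, ?)"
    try (PreparedStatement statement = connection.prepareStatement(prepared.getSqlString())) {
        NamedParameterSupport.applyStatementParams(statement, prepared.getParameters());
        try (ResultSet resultSet = statement.executeQuery()) {
            // iterate and map rows with a ResultMapper
        }
    }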
Null values are not supported and + * will cause {@link InternalServerErrorException} + * + * @param statement the statement that should be populated with the provided parameters + * @param parameters the list of parameters to apply + * @throws InternalServerErrorException in case of invalid parameter type + * @throws SQLException in case of DB failure + */ + // TODO This method will need to be updated to support vendor specific SQL mapping (see PreparedSql's TODO) + public static void applyStatementParams(PreparedStatement statement, List parameters) + throws InternalServerErrorException, SQLException { + for (int i = 0; i < parameters.size(); i++) { + applyStatementParameter(statement, i + 1, parameters.get(i)); + } + } + + /** + * Set prepared statement parameter based on the value type. Null values are not supported and will cause + * {@link InternalServerErrorException}. + * + * @param statement the statement for which the parameter should set + * @param index the parameter index + * @param value the parameter value to set + * @throws InternalServerErrorException in case of invalid parameter type + * @throws SQLException in case of DB failure + */ + private static void applyStatementParameter(PreparedStatement statement, int index, Object value) + throws InternalServerErrorException, SQLException { + if (value instanceof Integer) { + statement.setInt(index, (Integer) value); + } else if (value instanceof Long) { + statement.setLong(index, (Long) value); + } else if (value instanceof Float) { + statement.setFloat(index, (Float) value); + } else if (value instanceof Double) { + statement.setDouble(index, (Double) value); + } else if (value instanceof Boolean) { + statement.setBoolean(index, (Boolean) value); + } else if (value instanceof String) { + statement.setString(index, (String) value); + } else { + var type = value != null ? value.getClass().getName() : "null"; + throw new InternalServerErrorException("Unsupported parameter type: " + type); + } + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterToken.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterToken.java new file mode 100644 index 0000000000..9800a6c9e2 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterToken.java @@ -0,0 +1,115 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +/** + * Information about single named SQL parameter as parsed from parameter token name. 
+ * + * @see NamedParameterSql + */ +public class NamedParameterToken { + + private final String token; + + private final String name; + + private final Class javaType; + + private final boolean list; + + /** + * Create new parameter token information. + * + * @param token the original token name + * @param name the name of the parameter + * @param javaType the parameter's value java type + * @param list whether the parameter represents list of values + */ + public NamedParameterToken(String token, String name, Class javaType, boolean list) { + this.token = token; + this.name = name; + this.javaType = javaType; + this.list = list; + } + + /** + * Get the full parameter token name. + * + * @return the full token name + */ + public String getToken() { + return token; + } + + /** + * Get simple parameter name (i.e. parameter name without its type hints). + * + * @return the parameter name. + */ + public String getName() { + return name; + } + + /** + * Get parameter's value java type used to convert String parameter values. + * + * @return the parameter's value java type + */ + public Class getJavaType() { + return javaType; + } + + /** + * Whether the parameter is a collection parameter that should be expanded when preparing the actual statement. + * + * @return true if the parameter is collection based parameter + */ + public boolean isList() { + return list; + } + + /** + * Parse parameter token name into a parameter token value object. + * + *

+ * Parameter token name (from ${token-name}) can contain: + * + *

    + *
  • list: – list type prefix (optional) + *
  • int: – integer type prefix (optional) + *
  • name – actual parameter name + * + * @param tokenName the parameter token name + * @return the parsed parameter token + */ + public static NamedParameterToken parse(String tokenName) { + String name = tokenName; + boolean list = false; + if (name.startsWith(NamedParameterSql.PREFIX_LIST + ":")) { + list = true; + name = name.substring(NamedParameterSql.PREFIX_LIST.length() + 1); + } + + Class javaType = null; + if (name.startsWith(NamedParameterSql.PREFIX_INT + ":")) { + javaType = Integer.class; + name = name.substring(NamedParameterSql.PREFIX_INT.length() + 1); + } + + return new NamedParameterToken(tokenName, name, javaType, list); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/PreparedSql.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/PreparedSql.java new file mode 100644 index 0000000000..5f31775ba8 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/PreparedSql.java @@ -0,0 +1,44 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +import java.util.List; + +/** + * Simple convenient value class for prepared SQL string with it's positional parameters. This is intermediate + * product when processing SQL strings before preparing statements through the active JDBC connection. + */ +// TODO Introduce parameter value class if we need SQL types alongside the parameter values. +public class PreparedSql { + + private final String sqlString; + + private final List parameters; + + public PreparedSql(String sqlString, List parameters) { + this.sqlString = sqlString; + this.parameters = parameters; + } + + public String getSqlString() { + return sqlString; + } + + public List getParameters() { + return parameters; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2GenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2GenericTableHandler.java new file mode 100644 index 0000000000..e281eb92f8 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2GenericTableHandler.java @@ -0,0 +1,48 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.GenericTableHandler; + +/** + * DB2 database {@link GenericTableHandler} implementation. 
+ */ +public class DB2GenericTableHandler extends GenericTableHandler { + + public DB2GenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableConfig, queryConfig, commandConfig, batchSize, exceptionHandler); + } + + @Override + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + var innerSql = + "SELECT " + + "obj.fullobject, " + + "ROW_NUMBER() OVER (" + getOrderByClause().toSQL() + " ) \"__rn\" " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + return "SELECT fullobject FROM (" + innerSql +") " + + "WHERE " + + "\"__rn\" BETWEEN " + + "(${int:_pagedResultsOffset} + 1) AND " + + "(${int:_pagedResultsOffset} + ${int:_pageSize}) " + + "ORDER BY \"__rn\""; + } + }; + } + + // XXX query filter visitor using TO_NUMBER() function for numeric assertions + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2MappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2MappedTableHandler.java new file mode 100644 index 0000000000..540f087bae --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2MappedTableHandler.java @@ -0,0 +1,63 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedTableHandler; + +/** + * DB2 database {@link MappedTableHandler} implementation. 
+ */ +public class DB2MappedTableHandler extends MappedTableHandler { + + public DB2MappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableName, columnMapping, queryConfig, commandConfig, exceptionHandler); + } + + @Override + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + var innerSql = + "SELECT " + + getColumns().toSQL() + ", " + + "ROW_NUMBER() OVER (" + getOrderByClause().toSQL() + " ) \"__rn\" " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + return "SELECT * FROM (" + innerSql +") " + + "WHERE " + + "\"__rn\" BETWEEN " + + "(${int:_pagedResultsOffset} + 1) AND " + + "(${int:_pagedResultsOffset} + ${int:_pageSize}) " + + "ORDER BY \"__rn\""; + } + }; + } + + // XXX query filter visitor using TO_NUMBER() function for numeric assertions + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2GenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2GenericTableHandler.java new file mode 100644 index 0000000000..403241d0e7 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2GenericTableHandler.java @@ -0,0 +1,42 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.GenericTableHandler; + +/** + * H2 database {@link GenericTableHandler} implementation. + */ +public class H2GenericTableHandler extends GenericTableHandler { + + public H2GenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableConfig, queryConfig, commandConfig, batchSize, exceptionHandler); + } + + @Override + protected Map initializeImplicitSql() { + var result = super.initializeImplicitSql(); + result.put(ImplicitSqlType.CREATE, + "INSERT INTO ${_dbSchema}.${_mainTable} (" + + "objecttypes_id, objectid, rev, fullobject" + + ") VALUES (" + + "?, ?, ?, ? FORMAT JSON" + + ")"); + result.put(ImplicitSqlType.UPDATE, + "UPDATE ${_dbSchema}.${_mainTable} obj " + + "SET " + + "obj.objectid = ?, " + + "obj.rev = ?, " + + "obj.fullobject = ? FORMAT JSON " + + "WHERE obj.id = ?"); + return result; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2MappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2MappedTableHandler.java new file mode 100644 index 0000000000..962f32a2b0 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2MappedTableHandler.java @@ -0,0 +1,61 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. 
+ * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import java.util.stream.Collectors; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedTableHandler; + +/** + * H2 database {@link MappedTableHandler} implementation. + */ +public class H2MappedTableHandler extends MappedTableHandler { + + public H2MappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableName, columnMapping, queryConfig, commandConfig, exceptionHandler); + } + + @Override + protected Map initializeImplicitSql() { + var result = super.initializeImplicitSql(); + result.put(ImplicitSqlType.CREATE, + "INSERT INTO ${_dbSchema}.${_table} (" + + columnMapping.values().stream().map(config -> config.columnName) + .collect(Collectors.joining(", ")) + + ") VALUES (" + + columnMapping.values().stream() + .map(config -> config.isJson() ? "? FORMAT JSON" : "?") + .collect(Collectors.joining(", ")) + +")"); + result.put(ImplicitSqlType.UPDATE, + "UPDATE ${_dbSchema}.${_table} " + + "SET " + + columnMapping.values().stream() + .map(config -> config.columnName + (config.isJson() ? " = ? FORMAT JSON" : " = ?")) + .collect(Collectors.joining(", ")) + + " WHERE objectid = ?"); + return result; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLGenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLGenericTableHandler.java new file mode 100644 index 0000000000..66895119aa --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLGenericTableHandler.java @@ -0,0 +1,75 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.GenericTableHandler; + +/** + * MSSQL database {@link GenericTableHandler} implementation. + */ +public class MSSQLGenericTableHandler extends GenericTableHandler { + + /** + * Max length of searchable properties for MSSQL. + * Anything larger than 195 will overflow the max index size and error. 
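+ *
+ * The limit is reported through the {@link #getSearchableLength()} override below.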
+ */ + private static final int MSSQL_SEARCHABLE_LENGTH = 195; + + public MSSQLGenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableConfig, queryConfig, commandConfig, batchSize, exceptionHandler); + } + + @Override + protected Map initializeImplicitSql() { + var result = super.initializeImplicitSql(); + result.put(ImplicitSqlType.READFORUPDATE, + "SELECT obj.* " + + "FROM ${_dbSchema}.${_mainTable} obj " + + "WITH (UPDLOCK, ROWLOCK) " + + "WHERE " + + "obj.objecttypes_id = (" + + "SELECT id FROM ${_dbSchema}.objecttypes objtype " + + "WHERE objtype.objecttype = ?" + + ") AND " + + "obj.objectid = ?"); + return result; + } + + @Override + protected int getSearchableLength() { + return MSSQL_SEARCHABLE_LENGTH; + } + + @Override + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + var innerSql = + "SELECT " + + "obj.fullobject, " + + "ROW_NUMBER() OVER (" + getOrderByClause().toSQL() + " ) __rn " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + return "WITH results AS (" + innerSql +") " + + "SELECT fullobject FROM results WHERE " + + "__rn BETWEEN " + + "(${int:_pagedResultsOffset} + 1) AND " + + "(${int:_pagedResultsOffset} + ${int:_pageSize}) " + + "ORDER BY __rn"; + } + }; + } + + // XXX query filter visitor using ISNUMERIC() function for numeric assertions + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLMappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLMappedTableHandler.java new file mode 100644 index 0000000000..a183f974d1 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLMappedTableHandler.java @@ -0,0 +1,69 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedTableHandler; + +/** + * MSSQL database {@link MappedTableHandler} implementation. 
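+ *
+ * Overrides the READFORUPDATE statement to take row-level locks via the WITH (UPDLOCK, ROWLOCK)
+ * table hints and pages query results with a ROW_NUMBER() window function evaluated inside a
+ * common table expression.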
+ */ +public class MSSQLMappedTableHandler extends MappedTableHandler { + + public MSSQLMappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableName, columnMapping, queryConfig, commandConfig, exceptionHandler); + } + + @Override + protected Map initializeImplicitSql() { + var result = super.initializeImplicitSql(); + result.put(ImplicitSqlType.READFORUPDATE, + "SELECT * FROM ${_dbSchema}.${_table} WITH (UPDLOCK, ROWLOCK) WHERE objectid = ?"); + return result; + } + + @Override + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + var innerSql = + "SELECT " + + getColumns().toSQL() + ", " + + "ROW_NUMBER() OVER (" + getOrderByClause().toSQL() + " ) __rn " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + return "WITH results AS (" + innerSql +") " + + "SELECT * FROM results WHERE " + + "__rn BETWEEN " + + "(${int:_pagedResultsOffset} + 1) AND " + + "(${int:_pagedResultsOffset} + ${int:_pageSize}) " + + "ORDER BY __rn"; + } + }; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLGenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLGenericTableHandler.java new file mode 100644 index 0000000000..0d4f557e16 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLGenericTableHandler.java @@ -0,0 +1,33 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.GenericTableHandler; + +/** + * MySQL database {@link GenericTableHandler} implementation. + */ +public class MySQLGenericTableHandler extends GenericTableHandler { + + /** + * Max allowed searchable length with default settings and utf8mb4 encoding. + */ + private static final int MYSQL_SEARCHABLE_LENGTH = 768; + + public MySQLGenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableConfig, queryConfig, commandConfig, batchSize, exceptionHandler); + } + + @Override + protected int getSearchableLength() { + return MYSQL_SEARCHABLE_LENGTH; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLMappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLMappedTableHandler.java new file mode 100644 index 0000000000..7640d67dde --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLMappedTableHandler.java @@ -0,0 +1,38 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. 
If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedTableHandler; + +/** + * MySQL database {@link MappedTableHandler} implementation. + */ +public class MySQLMappedTableHandler extends MappedTableHandler { + + public MySQLMappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableName, columnMapping, queryConfig, commandConfig, exceptionHandler); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleGenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleGenericTableHandler.java new file mode 100644 index 0000000000..0dffff09e9 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleGenericTableHandler.java @@ -0,0 +1,57 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.GenericTableHandler; + +/** + * Oracle database {@link GenericTableHandler} implementation. + */ +public class OracleGenericTableHandler extends GenericTableHandler { + + public OracleGenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableConfig, queryConfig, commandConfig, batchSize, exceptionHandler); + } + + @Override + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + var innerSql = + "SELECT " + + "obj.fullobject, " + + "ROW_NUMBER() OVER (" + getOrderByClause().toSQL() + " ) \"__rn\" " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + return "SELECT fullobject FROM (" + innerSql +") " + + "WHERE " + + "\"__rn\" BETWEEN " + + "(${int:_pagedResultsOffset} + 1) AND " + + "(${int:_pagedResultsOffset} + ${int:_pageSize}) " + + "ORDER BY \"__rn\""; + } + }; + } + + @Override + protected PreparedStatement resolveImplicitStatement(ImplicitSqlType type, boolean keys, Connection connection) + throws SQLException { + return keys + ? 
connection.prepareStatement(implicitSql.get(type), new String[] { "id" }) + : connection.prepareStatement(implicitSql.get(type)); + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleMappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleMappedTableHandler.java new file mode 100644 index 0000000000..d87c4b955e --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleMappedTableHandler.java @@ -0,0 +1,61 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedTableHandler; + +/** + * Oracle database {@link MappedTableHandler} implementation. + */ +public class OracleMappedTableHandler extends MappedTableHandler { + + public OracleMappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableName, columnMapping, queryConfig, commandConfig, exceptionHandler); + } + + @Override + protected SQLBuilder createSqlBuilder() { + return new SQLBuilder() { + @Override + public String toSQL() { + var innerSql = + "SELECT " + + getColumns().toSQL() + ", " + + "ROW_NUMBER() OVER (" + getOrderByClause().toSQL() + " ) \"__rn\" " + + getFromClause().toSQL() + + getJoinClause().toSQL() + + getWhereClause().toSQL(); + return "SELECT * FROM (" + innerSql +") " + + "WHERE " + + "\"__rn\" BETWEEN " + + "(${int:_pagedResultsOffset} + 1) AND " + + "(${int:_pagedResultsOffset} + ${int:_pageSize}) " + + "ORDER BY \"__rn\""; + } + }; + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLGenericTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLGenericTableHandler.java new file mode 100644 index 0000000000..ac8887a1af --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLGenericTableHandler.java @@ -0,0 +1,148 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import static org.forgerock.openidm.repo.util.Clauses.where; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.List; +import java.util.Map; +import org.forgerock.json.JsonPointer; +import org.forgerock.json.JsonValue; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; 
+import org.forgerock.openidm.repo.jdbc.impl.SQLBuilder; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.GenericTableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterCollector; +import org.forgerock.openidm.repo.util.Clauses; +import org.forgerock.openidm.repo.util.StringSQLQueryFilterVisitor; +import org.forgerock.openidm.repo.util.StringSQLRenderer; +import org.forgerock.openidm.util.ResourceUtil; +import org.forgerock.util.query.QueryFilter; + +/** + * PostgreSQL database {@link GenericTableHandler} implementation. + */ +public class PostgreSQLGenericTableHandler extends GenericTableHandler { + + public PostgreSQLGenericTableHandler( + String schemaName, + JsonValue tableConfig, + Map queryConfig, + Map commandConfig, + int batchSize, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableConfig, queryConfig, commandConfig, batchSize, exceptionHandler); + } + + @Override + protected Map initializeImplicitSql() { + var result = super.initializeImplicitSql(); + + result.put(ImplicitSqlType.CREATE, + "INSERT INTO ${_dbSchema}.${_mainTable} (" + + "objecttypes_id, objectid, rev, fullobject" + + ") VALUES (" + + "?, ?, ?, ?::json" + + ")"); + result.put(ImplicitSqlType.UPDATE, + "UPDATE ${_dbSchema}.${_mainTable} " + + "SET " + + "objectid = ?, " + + "rev = ?, " + + "fullobject = ?::json " + + "WHERE id = ?"); + + return result; + } + + @Override + protected SQLBuilder resolveQueryFilter(QueryFilter queryFilter, List sortKeys, Map sqlParams) { + var builder = createSqlBuilder(); + + var collector = new NamedParameterCollector(sqlParams); + + var visitor = createFilterVisitor(); + builder.addColumn("fullobject::text") + .from("${_dbSchema}.${_mainTable}", "obj") + .join("${_dbSchema}.objecttypes", "objecttypes") + .on(where("obj.objecttypes_id = objecttypes.id") + .and("objecttypes.objecttype = ${_resource}")) + .where(Clauses.where(queryFilter.accept(visitor, collector).toSQL())); + + if (sortKeys != null) { + for (var sortKey : sortKeys) { + // TODO support numeric ordering + var orderBy = resolveJsonExtractPath(sortKey.getField(), collector); + builder.orderBy(orderBy.toString(), sortKey.isAscendingOrder()); + } + } + return builder; + } + + private StringSQLQueryFilterVisitor createFilterVisitor() { + return new StringSQLQueryFilterVisitor() { + + @Override + public StringSQLRenderer visitPresentFilter(NamedParameterCollector collector, JsonPointer field) { + if (ResourceUtil.RESOURCE_FIELD_CONTENT_ID_POINTER.equals(field)) { + // NOT NULL enforced by the schema + return new StringSQLRenderer("obj.objectid IS NOT NULL"); + } else { + return new StringSQLRenderer(resolveJsonExtractPath(field, collector) + " IS NOT NULL"); + } + } + + @Override + public StringSQLRenderer visitValueAssertion(NamedParameterCollector collector, String operand, + JsonPointer field, Object valueAssertion) { + String parameterKey = collector.register("v", valueAssertion); + if (ResourceUtil.RESOURCE_FIELD_CONTENT_ID_POINTER.equals(field)) { + return new StringSQLRenderer("(obj.objectid " + operand + " ${" + parameterKey + "})"); + } + String cast = ""; + if (isNumeric(valueAssertion)) { + cast = "::numeric"; + } else if (isBoolean(valueAssertion)) { + cast = "::boolean"; + } + return new StringSQLRenderer(resolveJsonExtractPath(field, collector).append(cast) + .append(" ").append(operand).append(" ") + .append("${").append(parameterKey).append("}").append(cast).toString()); + } + + private boolean isNumeric(Object value) { + return value instanceof 
Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double; + } + + private boolean isBoolean(Object value) { + return value instanceof Boolean; + } + + }; + } + + private StringBuilder resolveJsonExtractPath(JsonPointer field, NamedParameterCollector collector) { + StringBuilder result = new StringBuilder("json_extract_path_text(fullobject"); + for (String pathPart : field.toArray()) { + String tokenName = collector.register("p", pathPart); + result.append(", ${").append(tokenName).append("}"); + } + result.append(")"); + return result; + } + + @Override + protected void writeValueProperties(String fullId, long databaseId, JsonValue value, Connection connection) + throws SQLException { + // properties table is not necessary + } + + @Override + protected void clearValueProperties(String fullId, long databaseId, Connection connection) throws SQLException { + // properties table is not necessary + } + +} diff --git a/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLMappedTableHandler.java b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLMappedTableHandler.java new file mode 100644 index 0000000000..93fa934bc0 --- /dev/null +++ b/openidm-repo-jdbc/src/main/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLMappedTableHandler.java @@ -0,0 +1,63 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2024 Wren Security + */ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.util.Map; +import java.util.stream.Collectors; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedTableHandler; + +/** + * PostgreSQL database {@link MappedTableHandler} implementation. + */ +public class PostgreSQLMappedTableHandler extends MappedTableHandler { + + public PostgreSQLMappedTableHandler( + String schemaName, + String tableName, + JsonValue columnMapping, + Map queryConfig, + Map commandConfig, + SQLExceptionHandler exceptionHandler) { + super(schemaName, tableName, columnMapping, queryConfig, commandConfig, exceptionHandler); + } + + @Override + protected Map initializeImplicitSql() { + var result = super.initializeImplicitSql(); + + result.put(ImplicitSqlType.CREATE, + "INSERT INTO ${_dbSchema}.${_table} (" + + columnMapping.values().stream().map(config -> config.columnName) + .collect(Collectors.joining(", ")) + + ") VALUES (" + + columnMapping.values().stream() + .map(config -> config.isJson() ? "?::json" : "?") + .collect(Collectors.joining(", ")) + +")"); + result.put(ImplicitSqlType.UPDATE, + "UPDATE ${_dbSchema}.${_table} " + + "SET " + + columnMapping.values().stream() + .map(config -> config.columnName + " = ?" + (config.isJson() ? 
"::json" : "")) + .collect(Collectors.joining(", ")) + + " WHERE objectid = ?"); + + return result; + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/datasource/jdbc/impl/JDBCDataSourceServiceTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/datasource/jdbc/impl/JDBCDataSourceServiceTest.java index b1888c50fa..6e261c2afe 100644 --- a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/datasource/jdbc/impl/JDBCDataSourceServiceTest.java +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/datasource/jdbc/impl/JDBCDataSourceServiceTest.java @@ -50,17 +50,17 @@ public void testHikariDataSource() { // given JsonValue config = getDataSourceConfig("hikari"); config.add(new JsonPointer("/connectionPool/maximumPoolSize"), 2); - + // when DataSourceService dataSourceService = JDBCDataSourceService.getBootService(config, null); - + //then assertThat(dataSourceService.getDataSource()).isInstanceOf(HikariDataSource.class); assertThat(canExhaustPool(dataSourceService.getDataSource(), 2)).isTrue(); assertThat(dataSourceIsValid(dataSourceService.getDataSource())).isTrue(); } - - @Test + + @Test(enabled = false) public void testBoneCPDataSource() { // given JsonValue config = getDataSourceConfig("bonecp"); @@ -68,10 +68,10 @@ public void testBoneCPDataSource() { config.add(new JsonPointer("/connectionPool/maxConnectionsPerPartition"), 2); config.add(new JsonPointer("/connectionPool/minConnectionsPerPartition"), 1); config.add(new JsonPointer("/connectionPool/acquireIncrement"), 1); - + // when DataSourceService dataSourceService = JDBCDataSourceService.getBootService(config, null); - + // then assertThat(dataSourceService.getDataSource()).isInstanceOf(BoneCPDataSource.class); assertThat(canExhaustPool(dataSourceService.getDataSource(), 2)).isTrue(); @@ -82,16 +82,16 @@ public void testBoneCPDataSource() { public void testNonPoolingDataSource() { // given JsonValue config = getDataSourceConfig(null); - + // when DataSourceService dataSourceService = JDBCDataSourceService.getBootService(config, null); - + // then assertThat(dataSourceService.getDataSource()).isExactlyInstanceOf( NonPoolingDataSourceFactory.NonPoolingDataSource.class); assertThat(dataSourceIsValid(dataSourceService.getDataSource())).isTrue(); } - + private boolean dataSourceIsValid(DataSource ds) { try { ds.getConnection().isValid(5); @@ -100,7 +100,7 @@ private boolean dataSourceIsValid(DataSource ds) { } return true; } - + private JsonValue getDataSourceConfig(String type) { Object poolType = null; if (type != null) { @@ -108,7 +108,7 @@ private JsonValue getDataSourceConfig(String type) { field("type", type) ); } - + return new JsonValue( object( field("driverClass", "org.hsqldb.jdbcDriver"), @@ -123,7 +123,7 @@ private boolean canExhaustPool(DataSource ds, int poolSize) { boolean exhausted = false; int numConnections = 0; Set connections = new HashSet<>(); - + try { while (numConnections <= poolSize) { connections.add(ds.getConnection()); diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractGenericTableHandlerTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractGenericTableHandlerTest.java new file mode 100644 index 0000000000..b8d3de816e --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractGenericTableHandlerTest.java @@ -0,0 +1,75 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import static 
org.forgerock.json.JsonValue.field; +import static org.forgerock.json.JsonValue.json; +import static org.forgerock.json.JsonValue.object; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; + +/** + * Common superclass for {@link GenericTableHandler} test cases. + */ +public abstract class AbstractGenericTableHandlerTest extends AbstractTableHandlerTest { + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected JsonValue getTableConfig() { + return json(object( + field("mainTable", "genericobjects"), + field("propertiesTable", "genericobjectproperties"), + field("properties", object()) + )); + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected Map getQueryConfig() { + return json(object( + field("sample-query", "SELECT * FROM ${_dbSchema}.${_mainTable} obj WHERE objectid LIKE ${id}"), + field("sample-query-count", "SELECT COUNT(*) AS total FROM ${_dbSchema}.${_mainTable} WHERE objectid LIKE ${id}") + )).asMap(String.class); + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected Map getCommandConfig() { + return json(object( + field("sample-command", "DELETE FROM ${_dbSchema}.${_mainTable} WHERE objectid = ${id}") + )).asMap(String.class); + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected int getBatchSize() { + return -1; + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected SQLExceptionHandler getExceptionHandler() { + return null; + } + + @Override + protected String getTestQueryExpression() { + return "SELECT * FROM wrenidm.genericobjects"; + } + + @Override + protected String getParamQueryExpression() { + return "SELECT * FROM wrenidm.genericobjects WHERE objectid IN (${list:ids})"; + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractMappedTableHandlerTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractMappedTableHandlerTest.java new file mode 100644 index 0000000000..f0c1d382e4 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractMappedTableHandlerTest.java @@ -0,0 +1,144 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import static org.forgerock.json.JsonValue.array; +import static org.forgerock.json.JsonValue.field; +import static org.forgerock.json.JsonValue.json; +import static org.forgerock.json.JsonValue.object; +import static org.forgerock.json.resource.ResourcePath.resourcePath; +import static org.forgerock.openidm.repo.jdbc.Constants.OBJECT_ID; +import static org.testng.Assert.assertEquals; + +import java.util.Map; +import org.forgerock.json.JsonValue; +import org.forgerock.json.resource.BadRequestException; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.forgerock.openidm.repo.jdbc.SQLExceptionHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.MappedColumnConfig.ValueType; +import org.testng.annotations.Test; + +/** + * Common superclass for {@link MappedTableHandler} test cases. + */ +public abstract class AbstractMappedTableHandlerTest extends AbstractTableHandlerTest { + + /** + * Table handler constructor value provider. 
+ * @see #createTableHandler() + */ + protected String getTableName() { + return "managedgreeting"; + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected JsonValue getColumnMapping() { + return json(object( + field("_id", "objectid"), + field("_rev", Constants.RAW_OBJECT_REV), + // string column property + field("name", "name"), + // number column property + field("score", object( + field("column", "ranking"), // intentional name discrepancy + field("type", (FUTURE_MODE ? ValueType.NUMBER : ValueType.STRING).name()) + )), + // boolean column property + field("visible", array("visible", (FUTURE_MODE ? ValueType.BOOLEAN : ValueType.STRING).name())), + // json list column property + field("tags", array("tags", ValueType.JSON_LIST.name())), + // json map column property + field("meta", array("meta", ValueType.JSON_MAP.name())) + )); + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected Map getQueryConfig() { + return json(object( + field("sample-query", "SELECT * FROM ${_dbSchema}.${_table} obj"), + field("sample-query-count", "SELECT COUNT(*) AS total FROM ${_dbSchema}.${_table}") + )).asMap(String.class); + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected Map getCommandConfig() { + return json(object( + field("sample-command", "DELETE FROM ${_dbSchema}.${_table} WHERE objectid = ${id}") + )).asMap(String.class); + } + + /** + * Table handler constructor value provider. + * @see #createTableHandler() + */ + protected SQLExceptionHandler getExceptionHandler() { + return null; + } + + @Override + protected void assertResourceValues(Map resource, Map template) { + super.assertResourceValues(resource, template); + } + + @Override + protected String getTestQueryExpression() { + return "SELECT * FROM wrenidm.managedgreeting"; + } + + @Override + protected String getParamQueryExpression() { + return "SELECT * FROM wrenidm.managedgreeting WHERE objectid IN (${list:ids})"; + } + + @Test + public void testQueryFilterLegacy() throws Exception { + createResource("stringified-boolean", Map.of("name", "true")); + createResource("stringified-integer", Map.of("name", "7")); + createResource("stringified-double", Map.of("name", "7.0")); + + var booleanResult = queryResource("name eq true"); + assertEquals(booleanResult.size(), 1); + assertEquals(booleanResult.get(0).get(OBJECT_ID), "stringified-boolean"); + + var integerResult = queryResource("name eq 7"); + assertEquals(integerResult.size(), 1); + assertEquals(integerResult.get(0).get(OBJECT_ID), "stringified-integer"); + + var doubleResult = queryResource("name eq 7.0"); + assertEquals(doubleResult.size(), 1); + assertEquals(doubleResult.get(0).get(OBJECT_ID), "stringified-double"); + } + + @Test( + expectedExceptions = BadRequestException.class, + expectedExceptionsMessageRegExp = "Unmapped.*foobar.*" + ) + public void testStateCheckCreate() throws Exception { + createResource(RESOURCE_ID, Map.of("foobar", "unmapped")); + } + + @Test( + expectedExceptions = BadRequestException.class, + expectedExceptionsMessageRegExp = "Unmapped.*foobar.*" + ) + public void testStateCheckUpdate() throws Exception { + var resource = createResource(RESOURCE_ID, Map.of("name", "hello")); + resource.put("foobar", "unmapped"); + tableHandler.update( + resourcePath(OBJECT_TYPE).child(RESOURCE_ID).toString(), + OBJECT_TYPE, + RESOURCE_ID, + "0", + resource, + connection + ); + } + +} diff --git 
a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTableHandlerTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTableHandlerTest.java new file mode 100644 index 0000000000..2098db0be6 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTableHandlerTest.java @@ -0,0 +1,495 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import static org.forgerock.json.JsonValue.field; +import static org.forgerock.json.JsonValue.object; +import static org.forgerock.json.resource.ResourcePath.resourcePath; +import static org.forgerock.openidm.repo.QueryConstants.PAGED_RESULTS_OFFSET; +import static org.forgerock.openidm.repo.QueryConstants.PAGE_SIZE; +import static org.forgerock.openidm.repo.QueryConstants.QUERY_EXPRESSION; +import static org.forgerock.openidm.repo.QueryConstants.QUERY_FILTER; +import static org.forgerock.openidm.repo.QueryConstants.QUERY_ID; +import static org.forgerock.openidm.repo.QueryConstants.SORT_KEYS; +import static org.forgerock.openidm.repo.jdbc.Constants.OBJECT_ID; +import static org.forgerock.openidm.repo.jdbc.Constants.OBJECT_REV; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + +import com.google.common.collect.Lists; +import java.lang.reflect.Method; +import java.sql.Connection; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.forgerock.json.resource.NotFoundException; +import org.forgerock.json.resource.PreconditionFailedException; +import org.forgerock.json.resource.QueryFilters; +import org.forgerock.json.resource.ResourceResponse; +import org.forgerock.json.resource.SortKey; +import org.forgerock.openidm.repo.jdbc.Constants; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.testng.ITestResult; +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +/** + * Base test cases for {@link TableHandler} implementations. + * + *
    + * All tests are written for a single object type only and are run in sequence where + * the first test creates the actual object that is being used by the subsequent tests. + */ +public abstract class AbstractTableHandlerTest { + + /** + * Whether to use legacy behavior (legacy handlers and no support for number and boolean data types). + */ + protected static final boolean LEGACY_MODE = true; + + /** + * Whether to use support for number and boolean data types (this will be possible after dropping legacy mode). + */ + protected static final boolean FUTURE_MODE = false; + + protected static final String OBJECT_TYPE = "greeting"; + + protected static final String RESOURCE_ID = "hello"; + + protected final TableHandler tableHandler; + + protected Connection connection; + + public AbstractTableHandlerTest() { + try { + tableHandler = createTableHandler(); + } catch (Exception ex) { + throw new IllegalStateException("Error creating table handler", ex); + } + } + + /** + * Get active database + * + * @return the database connection to a fully initialized database + */ + protected abstract Connection getConnection() throws Exception; + + @BeforeMethod + public void beginTransaction(Method method) throws Exception { + connection = getConnection(); + connection.setAutoCommit(false); + } + + @AfterMethod + public void rollbackTransaction(ITestResult result, Method method) throws Exception { + connection.rollback(); + connection = null; + } + + /** + * Get database schema name. + */ + protected String getSchemaName() { + return "wrenidm"; + } + + /** + * Create table handler that will be tested. + */ + protected abstract TableHandler createTableHandler() throws Exception; + + /** + * Create new resource with the given ID and a set of properties. + */ + protected Map createResource(String id, Map properties) throws Exception { + Map resource = new LinkedHashMap<>(properties); + tableHandler.create( + resourcePath(OBJECT_TYPE).child(id).toString(), + OBJECT_TYPE, + id, + resource, + connection + ); + return resource; + } + + /** + * Read resource state with the given ID from the database. + */ + protected ResourceResponse readResource(String id) throws Exception { + return tableHandler.read( + resourcePath(OBJECT_TYPE).child(id).toString(), + OBJECT_TYPE, + id, + connection + ); + } + + /** + * Run query filter against using the current table handler with no additional parameters. + */ + protected List> queryResource(String queryFilter) throws Exception { + Map params = Map.of( + QUERY_FILTER, QueryFilters.parse(queryFilter), + PAGED_RESULTS_OFFSET, 0, + PAGE_SIZE, 0 + ); + return queryResource(params); + } + + /** + * Run query filter against using the current table handler. + */ + protected List> queryResource(Map params) throws Exception { + return tableHandler.query( + OBJECT_TYPE, + LEGACY_MODE ? new HashMap<>(params) : params, + connection + ); + } + + /** + * Assert that resource properties match the given template (including value types). 
+ */ + protected void assertResourceValues(Map resource, Map template) { + for (String propertyName : template.keySet()) { + var resourceValue = resource.get(propertyName); + var templateValue = template.get(propertyName); + assertEquals(resourceValue, templateValue, "property " + propertyName); + } + } + + @Test + public void testCreate() throws Exception { + Map resource = createResource(RESOURCE_ID, Map.of( + "name", "HELLO", + "score", 7, + "visible", true)); + + assertEquals(resource.get(OBJECT_ID), RESOURCE_ID); + assertEquals(resource.get(OBJECT_REV), "0"); + } + + @Test + public void testRead() throws Exception { + var template = Map.of( + "name", "HELLO", + "score", FUTURE_MODE ? 7 : "7", + "visible", FUTURE_MODE ? true : "true", + "tags", List.of("foo", "bar"), + "meta", Map.of("owner", "john")); + createResource(RESOURCE_ID, template); + + var resource = readResource(RESOURCE_ID); + assertNotNull(resource); + assertEquals(resource.getId(), RESOURCE_ID); + assertEquals(resource.getRevision(), "0"); + + assertResourceValues(resource.getContent().asMap(), template); + } + + @Test + public void testReadNullable() throws Exception { + createResource(RESOURCE_ID, Map.of()); + + var resource = readResource(RESOURCE_ID); + assertNotNull(resource); + assertEquals(resource.getId(), RESOURCE_ID); + for (String property : Arrays.asList("name", "score", "visible")) { + var value = resource.getContent().get(property); + assertNotNull(value); + assertNull(value.getObject()); + } + } + + @Test(expectedExceptions = NotFoundException.class) + public void testReadNonExistent() throws Exception { + readResource("non-existent"); + } + + @Test + public void testQueryFilterSimple() throws Exception { + var template = Map.of( + "name", "HELLO", + "score", FUTURE_MODE ? 7.0 : "7", + "visible", FUTURE_MODE ? 
true : "true", + "tags", List.of("foo", "bar"), + "meta", Map.of("owner", "john")); + createResource(RESOURCE_ID, template); + + var result = queryResource("_id eq '" + RESOURCE_ID + "'"); + assertNotNull(result); + assertEquals(result.size(), 1); + + assertResourceValues(result.get(0), template); + } + + @Test + public void testQueryFilterComplex() throws Exception { + createResource(RESOURCE_ID, Map.of("name", "HELLO TO", "score", 70, "visible", true)); + createResource("with-different-name", Map.of("name", "GOOD BYE", "score", 70, "visible", true)); + createResource("with-lower-score", Map.of("name", "HELLO TO", "score", 8, "visible", true)); + createResource("with-not-visible", Map.of("name", "HELLO TO", "score", 80, "visible", false)); + + var resultIds = queryResource("name sw 'HELLO' and score eq 70 and visible eq true").stream() + .map(resource -> resource.get(OBJECT_ID)) + .collect(Collectors.toSet()); + assertEquals(resultIds, Set.of(RESOURCE_ID)); + } + + @Test(enabled = FUTURE_MODE) + public void testQueryFilterNumber() throws Exception { + createResource("lower-value", Map.of("score", 9)); + createResource("lower-decimal", Map.of("score", 9.1)); + createResource(RESOURCE_ID, Map.of("score", 70)); + createResource("higher-value", Map.of("score", 90)); + createResource("higher-decimal", Map.of("score", 90.1)); + createResource("null-value", object(field("score", null))); + + var resultIds = queryResource("score ge 70").stream() + .map(resource -> resource.get(OBJECT_ID)) + .collect(Collectors.toSet()); + assertEquals(resultIds, Set.of(RESOURCE_ID, "higher-value", "higher-decimal")); + } + + @Test + public void testQueryFilterBoolean() throws Exception { + createResource(RESOURCE_ID, Map.of("visible", true)); + createResource("not-visible", Map.of("visible", false)); + createResource("null-visible", object(field("visible", null))); + + var resultIds = queryResource("visible eq true").stream() + .map(resource -> resource.get(OBJECT_ID)) + .collect(Collectors.toSet()); + assertEquals(resultIds, Set.of(RESOURCE_ID)); + } + + @Test + public void testQueryFilterEmpty() throws Exception { + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + + var result = queryResource("name eq 'non-existent'"); + assertTrue(result.isEmpty()); + } + + @Test + public void testQueryPaging() throws Exception { + for (int i = 0; i < 10; i++) { + createResource("paging-" + i, Map.of("name", "HELLO " + i)); + } + + Map params = Map.of( + QUERY_FILTER, QueryFilters.parse("_id sw 'paging-'"), + SORT_KEYS, List.of(SortKey.ascendingOrder(OBJECT_ID)), + PAGED_RESULTS_OFFSET, 3, + PAGE_SIZE, 3 + ); + var matchedIds = queryResource(params).stream() + .map(resource -> resource.get(OBJECT_ID)) + .collect(Collectors.toSet()); + assertEquals(matchedIds, Set.of("paging-3", "paging-4", "paging-5")); + } + + @Test + public void testQuerySorting() throws Exception { + for (int i = 0; i < 10; i++) { + createResource("sorting-" + i, Map.of("name", "HELLO " + i)); + } + + Map ascendingParams = Map.of( + QUERY_FILTER, QueryFilters.parse("_id sw 'sorting-'"), + PAGE_SIZE, 0, + SORT_KEYS, List.of(SortKey.ascendingOrder(OBJECT_ID), SortKey.ascendingOrder("name")) + ); + var ascendingIds = queryResource(ascendingParams).stream() + .map(resource -> resource.get(OBJECT_ID)) + .collect(Collectors.toList()); + + Map descendingParams = Map.of( + QUERY_FILTER, QueryFilters.parse("_id sw 'sorting-'"), + PAGE_SIZE, 0, + SORT_KEYS, List.of(SortKey.descendingOrder(OBJECT_ID)) + ); + var descendingIds = 
queryResource(descendingParams).stream() + .map(resource -> resource.get(OBJECT_ID)) + .collect(Collectors.toList()); + + assertNotEquals(ascendingIds, descendingIds); + assertEquals(ascendingIds, Lists.reverse(descendingIds)); + } + + @Test + public void testQueryId() throws Exception { + // single matching resource is enough (we are not testing DB engines) + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + + var result = queryResource(Map.of( + QUERY_ID, "sample-query", + "id", RESOURCE_ID, + PAGE_SIZE, 0 + )); + assertFalse(result.isEmpty()); + for (var resource : result) { + assertEquals(resource.get("name"), "HELLO"); + } + } + + @Test(dependsOnMethods = "testCreate") + public void testQueryExpression() throws Exception { + // single matching resource is enough (we are not testing DB engines) + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + + var result = queryResource(Map.of( + QUERY_EXPRESSION, getTestQueryExpression(), + PAGE_SIZE, 0 + )); + assertFalse(result.isEmpty()); + } + + /** + * Get test query expression that results in a non-empty result set. + * + * @return the test query expression + */ + protected abstract String getTestQueryExpression(); + + @Test + public void testParamExpansion() throws Exception { + // single matching resource is enough (we are not testing DB engines) + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + + var result = queryResource(Map.of( + QUERY_EXPRESSION, getParamQueryExpression(), + "ids", "ahoy,hello,bonjour", + PAGE_SIZE, 0 + )); + assertFalse(result.isEmpty()); + } + + /** + * Get test query expression that contains ${list:ids} named parameter. + * + * @return the test query expression + */ + protected abstract String getParamQueryExpression(); + + @Test + public void testUpdate() throws Exception { + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + + Map resource = new LinkedHashMap<>(Map.of( + "name", "BONJOUR" + )); + tableHandler.update( + resourcePath(OBJECT_TYPE).child(RESOURCE_ID).toString(), + OBJECT_TYPE, + RESOURCE_ID, + "0", + resource, + connection + ); + assertEquals(resource.get(Constants.OBJECT_REV), "1"); + + var updated = readResource(RESOURCE_ID); + assertEquals(updated.getContent().get("name").asString(), "BONJOUR"); + } + + @Test(expectedExceptions = PreconditionFailedException.class) + public void testUpdateLock() throws Exception { + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + + Map resource = new LinkedHashMap<>(Map.of( + "name", "AHOY" + )); + tableHandler.update( + resourcePath(OBJECT_TYPE).child(RESOURCE_ID).toString(), + OBJECT_TYPE, + RESOURCE_ID, + "-1", + resource, + connection + ); + } + + @Test(enabled = !LEGACY_MODE) + public void testQueryIdCount() throws Exception { + assertNull(tableHandler.queryCount(OBJECT_TYPE, Map.of("_queryId", "non-existing"), connection)); + + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + createResource("alternative", Map.of("name", "GUTEN TAG")); + + Map params = Map.of( + QUERY_ID, "sample-query", + "id", RESOURCE_ID, + PAGE_SIZE, 0 + ); + var result = queryResource(params); + var count = tableHandler.queryCount(OBJECT_TYPE, params, connection); + assertFalse(result.isEmpty()); + assertNotNull(count); + assertEquals(count, result.size()); + } + + @Test(enabled = !LEGACY_MODE) + public void testQueryFilterCount() throws Exception { + createResource(RESOURCE_ID, Map.of("name", "HELLO")); + createResource("alternative", Map.of("name", "GUTEN TAG")); + + assertEquals(tableHandler.queryCount(OBJECT_TYPE, Map.of( + QUERY_FILTER, 
QueryFilters.parse("name eq 'non-existent'"), + PAGE_SIZE, 0 + ), connection), 0); + + assertEquals(tableHandler.queryCount(OBJECT_TYPE, Map.of( + QUERY_FILTER, QueryFilters.parse("name eq 'HELLO'"), + PAGE_SIZE, 0 + ), connection), 1); + } + + @Test + public void testDelete() throws Exception { + createResource("for-deletion", Map.of("name", "ARRIVEDERCI")); + + assertNotNull(readResource("for-deletion")); + + tableHandler.delete( + resourcePath(OBJECT_TYPE).child("for-deletion").toString(), + OBJECT_TYPE, + "for-deletion", + "0", + connection + ); + + try { + readResource("for-deletion"); + fail("NotFoundException expected"); + } catch (NotFoundException e) { } + } + + @Test + public void testCommand() throws Exception { + createResource("42", Map.of("name", "OREVUAR")); + + Map params = Map.of( + "commandId", "sample-command", + "id", "42" + ); + var result = tableHandler.command( + OBJECT_TYPE, + LEGACY_MODE ? new HashMap<>(params) : params, + connection + ); + assertNotNull(result); + assertEquals(result, 1); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTestConnectionProvider.java new file mode 100644 index 0000000000..5ebce86dda --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/AbstractTestConnectionProvider.java @@ -0,0 +1,74 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import java.lang.reflect.Proxy; +import java.net.URL; +import java.nio.file.Path; +import java.sql.Connection; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Common superclass for database connection providers that want to have custom initialization on the + * first open connection and custom teardown logic when the last connection gets closed. This helps + * with starting and stopping the database only once per test run. + */ +public abstract class AbstractTestConnectionProvider { + + private static final Map CONNECTION_COUNTERS = new HashMap<>(); + + /** + * Request connection for the database. + */ + public final Connection getConnection() throws Exception { + synchronized (getClass()) { + var providerKey = getClass(); + if (!CONNECTION_COUNTERS.containsKey(providerKey)) { + CONNECTION_COUNTERS.put(providerKey, new AtomicInteger()); + } + var counter = CONNECTION_COUNTERS.get(providerKey); + try { + var connection = openConnection(counter.get() == 0); + return (Connection) Proxy.newProxyInstance( + getClass().getClassLoader(), + new Class[] { Connection.class }, + (proxy, method, args) -> { + if (method.getName().equals("close")) { + closeConnection(connection, counter.decrementAndGet() <= 0); + return null; + } else { + return method.invoke(connection, args); + } + }); + } finally { + counter.incrementAndGet(); + } + } + } + + /** + * Open new connection while indicating if it is the first one to be opened. + */ + protected abstract Connection openConnection(boolean first) throws Exception; + + /** + * Close active connection while indicating if it is the last open connection. + */ + protected void closeConnection(Connection connection, boolean last) throws Exception { + connection.close(); // no teardown by default + } + + /** + * Get class-path resource path. + * + *
    Can be used by subclasses to get location of a potential DB initialization script. + */ + protected String getResourcePath(String name) throws Exception { + URL resourceUrl = getClass().getClassLoader().getResource(name); + if (resourceUrl == null) { + throw new IllegalArgumentException("Resource '" + name + "' not found"); + } + return Path.of(resourceUrl.toURI()).toString(); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBGenericTableHandlerTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBGenericTableHandlerTest.java new file mode 100644 index 0000000000..3605525052 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBGenericTableHandlerTest.java @@ -0,0 +1,26 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import java.sql.Connection; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "embedded") +public class HSQLDBGenericTableHandlerTest extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new HSQLDBTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() { + return new GenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } +} \ No newline at end of file diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBMappedTableHandlerTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBMappedTableHandlerTest.java new file mode 100644 index 0000000000..825a911ff2 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBMappedTableHandlerTest.java @@ -0,0 +1,27 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import java.sql.Connection; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "embedded") +public class HSQLDBMappedTableHandlerTest extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new HSQLDBTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + return new MappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} \ No newline at end of file diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBTestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBTestConnectionProvider.java new file mode 100644 index 0000000000..b04c08206b --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/handler/HSQLDBTestConnectionProvider.java @@ -0,0 +1,20 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.handler; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.hsqldb.cmdline.SqlFile; + +public class HSQLDBTestConnectionProvider extends AbstractTestConnectionProvider { + + @Override + protected Connection 
openConnection(boolean first) throws Exception { + var connection = DriverManager.getConnection("jdbc:hsqldb:mem:test;shutdown=true", "SA", ""); + if (first) { + var sqlFile = new SqlFile(getClass().getClassLoader().getResource("hsqldb.sql")); + sqlFile.setConnection(connection); + sqlFile.execute(); + } + return connection; + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSqlTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSqlTest.java new file mode 100644 index 0000000000..d421be5cde --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSqlTest.java @@ -0,0 +1,69 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; + +import java.util.List; +import java.util.stream.Collectors; +import org.testng.annotations.Test; + +/** + * {@link NamedParameterSql} test case. + */ +public class NamedParameterSqlTest { + + @Test + public void testParseNoParams() { + var parsed = NamedParameterSql.parse("SELECT * FROM hello"); + assertEquals(parsed.getSqlString(), "SELECT * FROM hello"); + assertNotNull(parsed.getParamTokens()); + assertTrue(parsed.getParamTokens().isEmpty()); + } + + @Test + public void testParseWithParams() { + var parsed = NamedParameterSql.parse("SELECT * FROM hello WHERE " + + "value = ${foo} OR value = ${int:bar} OR value IN (${list:baz})"); + + assertEquals(parsed.getSqlString(), "SELECT * FROM hello " + + "WHERE value = ? OR value = ? OR value IN (${list:baz})"); + + var paramTypes = parsed.getParamTokens(); + assertNotNull(paramTypes); + + var paramTokens = paramTypes.stream() + .map(NamedParameterToken::getToken).collect(Collectors.toList()); + assertEquals(paramTokens, List.of("foo", "int:bar", "list:baz")); + } + + @Test + public void testParseParamTokens() { + var simpleParam = NamedParameterToken.parse("foo"); + assertEquals(simpleParam.getToken(), "foo"); + assertEquals(simpleParam.getName(), "foo"); + assertEquals(simpleParam.getJavaType(), null); + assertFalse(simpleParam.isList()); + + var integerParam = NamedParameterToken.parse("int:foo"); + assertEquals(integerParam.getToken(), "int:foo"); + assertEquals(integerParam.getName(), "foo"); + assertEquals(integerParam.getJavaType(), Integer.class); + assertFalse(integerParam.isList()); + + var listParam = NamedParameterToken.parse("list:foo"); + assertEquals(listParam.getToken(), "list:foo"); + assertEquals(listParam.getName(), "foo"); + assertEquals(listParam.getJavaType(), null); + assertTrue(listParam.isList()); + + var listOfIntsParam = NamedParameterToken.parse("list:int:foo"); + assertEquals(listOfIntsParam.getToken(), "list:int:foo"); + assertEquals(listOfIntsParam.getName(), "foo"); + assertEquals(listOfIntsParam.getJavaType(), Integer.class); + assertTrue(listOfIntsParam.isList()); + + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSupportTest.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSupportTest.java new file mode 100644 index 0000000000..379680be85 --- /dev/null +++ 
b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/statement/NamedParameterSupportTest.java @@ -0,0 +1,26 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.statement; + +import static org.testng.Assert.assertEquals; + +import java.util.List; +import java.util.Map; +import org.testng.annotations.Test; + +/** + * {@link NamedParameterSupport} test case. + */ +public class NamedParameterSupportTest { + + @Test + public void testPrepareSqlString() throws Exception { + var sql = NamedParameterSql.parse("SELECT * FROM hello WHERE id = ${int:foo} AND val IN (${list:bar})"); + + var preparedSql = NamedParameterSupport.prepareSqlString(sql, Map.of( + "foo", 13, + "bar", List.of("world", "universe"))); + + assertEquals(preparedSql.getSqlString(), "SELECT * FROM hello WHERE id = ? AND val IN (?, ?)"); + assertEquals(preparedSql.getParameters(), List.of(13, "world", "universe")); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2GenericTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2GenericTableHandlerIT.java new file mode 100644 index 0000000000..a2aaf9c3d9 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2GenericTableHandlerIT.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.DB2TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractGenericTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "db2") +public class DB2GenericTableHandlerIT extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new DB2TestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new DB2TableHandler( + getTableConfig(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getBatchSize(), + getExceptionHandler()); + } + return new DB2GenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2MappedTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2MappedTableHandlerIT.java new file mode 100644 index 0000000000..86d28e70f7 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2MappedTableHandlerIT.java @@ -0,0 +1,41 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.crypto.impl.CryptoServiceImpl; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractMappedTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "db2") +public class DB2MappedTableHandlerIT extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new 
DB2TestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new org.forgerock.openidm.repo.jdbc.impl.OracleMappedTableHandler( + getTableName(), + getColumnMapping().asMap(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getExceptionHandler(), + () -> new CryptoServiceImpl()); + + } + return new DB2MappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2TestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2TestConnectionProvider.java new file mode 100644 index 0000000000..31538db054 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/DB2TestConnectionProvider.java @@ -0,0 +1,36 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import static org.testng.Assert.assertFalse; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractTestConnectionProvider; +import org.testcontainers.containers.Db2Container; + +public class DB2TestConnectionProvider extends AbstractTestConnectionProvider { + + private static Db2Container container = new Db2Container("icr.io/db2_community/db2:11.5.9.0") + .acceptLicense() + .withDatabaseName("wrenidm") + .withUsername("wrenidm") + .withPassword("wrenidm") + .withInitScript("vendor/db2.sql"); + + @Override + protected Connection openConnection(boolean first) throws Exception { + if (first) { + assertFalse(container.isRunning()); + container.start(); + } + return DriverManager.getConnection(container.getJdbcUrl(), container.getUsername(), container.getPassword()); + } + + @Override + protected void closeConnection(Connection connection, boolean last) throws Exception { + super.closeConnection(connection, last); + if (last) { + container.close(); + } + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2GenericTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2GenericTableHandlerIT.java new file mode 100644 index 0000000000..bc0ea66d71 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2GenericTableHandlerIT.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.H2TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractGenericTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "h2") +public class H2GenericTableHandlerIT extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new H2TestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() { + if (LEGACY_MODE) { + return new H2TableHandler( + getTableConfig(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getBatchSize(), + getExceptionHandler()); + } + return new 
H2GenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2MappedTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2MappedTableHandlerIT.java new file mode 100644 index 0000000000..b6c8b94474 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2MappedTableHandlerIT.java @@ -0,0 +1,41 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.crypto.impl.CryptoServiceImpl; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractMappedTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "h2") +public class H2MappedTableHandlerIT extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new H2TestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new org.forgerock.openidm.repo.jdbc.impl.H2MappedTableHandler( + getTableName(), + getColumnMapping().asMap(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getExceptionHandler(), + () -> new CryptoServiceImpl()); + + } + return new H2MappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2TestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2TestConnectionProvider.java new file mode 100644 index 0000000000..97fdaa3a26 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/H2TestConnectionProvider.java @@ -0,0 +1,18 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractTestConnectionProvider; + +public class H2TestConnectionProvider extends AbstractTestConnectionProvider { + + @Override + protected Connection openConnection(boolean first) throws Exception { + String initSql = getResourcePath("vendor/h2.sql").replace("\\", "/").replace("'", "\'"); + return DriverManager.getConnection( + "jdbc:h2:mem:testdb" + (first ? 
";INIT=RUNSCRIPT FROM '" + initSql + "'" : ""), + "wrenidm", + "wrenidm"); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLGenericTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLGenericTableHandlerIT.java new file mode 100644 index 0000000000..05fc36a623 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLGenericTableHandlerIT.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.MSSQLTableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractGenericTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "mssql") +public class MSSQLGenericTableHandlerIT extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new MSSQLTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() { + if (LEGACY_MODE) { + return new MSSQLTableHandler( + getTableConfig(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getBatchSize(), + getExceptionHandler()); + } + return new MSSQLGenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLMappedTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLMappedTableHandlerIT.java new file mode 100644 index 0000000000..aa6e05ffbb --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLMappedTableHandlerIT.java @@ -0,0 +1,41 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.crypto.impl.CryptoServiceImpl; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractMappedTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "mssql") +public class MSSQLMappedTableHandlerIT extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new MSSQLTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new org.forgerock.openidm.repo.jdbc.impl.MSSQLMappedTableHandler( + getTableName(), + getColumnMapping().asMap(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getExceptionHandler(), + () -> new CryptoServiceImpl()); + + } + return new MSSQLMappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLTestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLTestConnectionProvider.java new 
file mode 100644 index 0000000000..61109f6cd3 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MSSQLTestConnectionProvider.java @@ -0,0 +1,37 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import static org.testng.Assert.assertFalse; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractTestConnectionProvider; +import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.containers.MSSQLServerContainer; + +@SuppressWarnings("rawtypes") +public class MSSQLTestConnectionProvider extends AbstractTestConnectionProvider { + + private static final String IMAGE_NAME = "mcr.microsoft.com/mssql/server:2019-CU14-ubuntu-20.04"; + + private static JdbcDatabaseContainer container = new MSSQLServerContainer(IMAGE_NAME) + .acceptLicense() + .withInitScript("vendor/mssql.sql"); + + @Override + protected Connection openConnection(boolean first) throws Exception { + if (first) { + assertFalse(container.isRunning()); + container.start(); + } + return DriverManager.getConnection(container.getJdbcUrl(), "wrenidm", "wrenidm"); + } + + @Override + protected void closeConnection(Connection connection, boolean last) throws Exception { + super.closeConnection(connection, last); + if (last) { + container.stop(); + } + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLGenericTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLGenericTableHandlerIT.java new file mode 100644 index 0000000000..1402a95285 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLGenericTableHandlerIT.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.MySQLTableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractGenericTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "mysql") +public class MySQLGenericTableHandlerIT extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new MySQLTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() { + if (LEGACY_MODE) { + return new MySQLTableHandler( + getTableConfig(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getBatchSize(), + getExceptionHandler()); + } + return new MySQLGenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLMappedTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLMappedTableHandlerIT.java new file mode 100644 index 0000000000..c2691e5043 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLMappedTableHandlerIT.java @@ -0,0 +1,41 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import 
org.forgerock.openidm.crypto.impl.CryptoServiceImpl; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractMappedTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "mysql") +public class MySQLMappedTableHandlerIT extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new MySQLTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new org.forgerock.openidm.repo.jdbc.impl.MappedTableHandler( + getTableName(), + getColumnMapping().asMap(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getExceptionHandler(), + () -> new CryptoServiceImpl()); + + } + return new MySQLMappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLTestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLTestConnectionProvider.java new file mode 100644 index 0000000000..3440ce4a8a --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/MySQLTestConnectionProvider.java @@ -0,0 +1,18 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractTestConnectionProvider; + +public class MySQLTestConnectionProvider extends AbstractTestConnectionProvider { + + @Override + protected Connection openConnection(boolean first) throws Exception { + return DriverManager.getConnection( + // https://github.com/testcontainers/testcontainers-java/issues/4121 + "jdbc:tc:mysql:8.0.36://hostname/wrenidm?TC_INITSCRIPT=vendor/mysql.sql", + "wrenidm", + "wrenidm"); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleGenericTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleGenericTableHandlerIT.java new file mode 100644 index 0000000000..aec7cd106c --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleGenericTableHandlerIT.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.OracleTableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractGenericTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "oracle") +public class OracleGenericTableHandlerIT extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new OracleTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() { + if (LEGACY_MODE) { + return new OracleTableHandler( + getTableConfig(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getBatchSize(), + getExceptionHandler()); + } + return new 
OracleGenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleMappedTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleMappedTableHandlerIT.java new file mode 100644 index 0000000000..ae40f48e6a --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleMappedTableHandlerIT.java @@ -0,0 +1,41 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.crypto.impl.CryptoServiceImpl; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractMappedTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "oracle") +public class OracleMappedTableHandlerIT extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new OracleTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new org.forgerock.openidm.repo.jdbc.impl.OracleMappedTableHandler( + getTableName(), + getColumnMapping().asMap(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getExceptionHandler(), + () -> new CryptoServiceImpl()); + + } + return new OracleMappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleTestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleTestConnectionProvider.java new file mode 100644 index 0000000000..7ac7c017b3 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/OracleTestConnectionProvider.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import static org.testng.Assert.assertFalse; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractTestConnectionProvider; +import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.containers.OracleContainer; + +@SuppressWarnings("rawtypes") +public class OracleTestConnectionProvider extends AbstractTestConnectionProvider { + + private static final String IMAGE_NAME = "gvenzl/oracle-xe:21-slim-faststart"; + + private static JdbcDatabaseContainer container = new OracleContainer(IMAGE_NAME) + .withDatabaseName("wrenidm") + .withUsername("wrenidm") + .withPassword("wrenidm") + .withInitScript("vendor/oracle.sql"); + + @Override + protected Connection openConnection(boolean first) throws Exception { + if (first) { + assertFalse(container.isRunning()); + container.start(); + } + return DriverManager.getConnection(container.getJdbcUrl(), "wrenidm", "wrenidm"); + } + + @Override + protected void closeConnection(Connection connection, boolean last) throws Exception { + super.closeConnection(connection, last); + if (last) { + container.stop(); + } + } + +} diff --git 
a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLGenericTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLGenericTableHandlerIT.java new file mode 100644 index 0000000000..d106e3481c --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLGenericTableHandlerIT.java @@ -0,0 +1,39 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.PostgreSQLTableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractGenericTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "postgresql") +public class PostgreSQLGenericTableHandlerIT extends AbstractGenericTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new PostgreSQLTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() { + if (LEGACY_MODE) { + return new PostgreSQLTableHandler( + getTableConfig(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getBatchSize(), + getExceptionHandler()); + } + return new PostgreSQLGenericTableHandler( + getSchemaName(), + getTableConfig(), + getQueryConfig(), + getCommandConfig(), + getBatchSize(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLMappedTableHandlerIT.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLMappedTableHandlerIT.java new file mode 100644 index 0000000000..d02d7a50ba --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLMappedTableHandlerIT.java @@ -0,0 +1,41 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import org.forgerock.json.JsonValue; +import org.forgerock.openidm.crypto.impl.CryptoServiceImpl; +import org.forgerock.openidm.repo.jdbc.TableHandler; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractMappedTableHandlerTest; +import org.testng.annotations.Test; + +@Test(singleThreaded = true, suiteName = "postgresql") +public class PostgreSQLMappedTableHandlerIT extends AbstractMappedTableHandlerTest { + + @Override + protected Connection getConnection() throws Exception { + return new PostgreSQLTestConnectionProvider().getConnection(); + } + + @Override + protected TableHandler createTableHandler() throws Exception { + if (LEGACY_MODE) { + return new org.forgerock.openidm.repo.jdbc.impl.PostgreSQLMappedTableHandler( + getTableName(), + getColumnMapping().asMap(), + getSchemaName(), + JsonValue.json(getQueryConfig()), + JsonValue.json(getCommandConfig()), + getExceptionHandler(), + () -> new CryptoServiceImpl()); + + } + return new PostgreSQLMappedTableHandler( + getSchemaName(), + getTableName(), + getColumnMapping(), + getQueryConfig(), + getCommandConfig(), + getExceptionHandler() + ); + } + +} diff --git a/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLTestConnectionProvider.java b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLTestConnectionProvider.java new 
file mode 100644 index 0000000000..e3ffc92e92 --- /dev/null +++ b/openidm-repo-jdbc/src/test/java/org/forgerock/openidm/repo/jdbc/impl/refactor/vendor/PostgreSQLTestConnectionProvider.java @@ -0,0 +1,17 @@ +package org.forgerock.openidm.repo.jdbc.impl.refactor.vendor; + +import java.sql.Connection; +import java.sql.DriverManager; +import org.forgerock.openidm.repo.jdbc.impl.refactor.handler.AbstractTestConnectionProvider; + +public class PostgreSQLTestConnectionProvider extends AbstractTestConnectionProvider { + + @Override + protected Connection openConnection(boolean first) throws Exception { + return DriverManager.getConnection( + "jdbc:tc:postgresql:13.13:///wrenidm?TC_INITSCRIPT=vendor/postgresql.sql", + "wrenidm", + "wrenidm"); + } + +} diff --git a/openidm-repo-jdbc/src/test/resources/hsqldb.sql b/openidm-repo-jdbc/src/test/resources/hsqldb.sql new file mode 100644 index 0000000000..25d04f96d6 --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/hsqldb.sql @@ -0,0 +1,42 @@ +CREATE SCHEMA wrenidm; + +CREATE TABLE wrenidm.objecttypes ( + id INTEGER IDENTITY NOT NULL, + objecttype VARCHAR(255) NOT NULL, + PRIMARY KEY (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id INTEGER IDENTITY NOT NULL, + objecttypes_id INTEGER NOT NULL, + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + fullobject VARCHAR(65535), + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE ON UPDATE NO ACTION, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +); + +CREATE TABLE wrenidm.genericobjectproperties ( + genericobjects_id BIGINT NOT NULL, + propkey VARCHAR(255) NOT NULL, + proptype VARCHAR(32), + propvalue VARCHAR(65535), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE ON UPDATE NO ACTION +); +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_prop ON wrenidm.genericobjectproperties (propkey, propvalue); + +CREATE TABLE wrenidm.managedgreeting ( + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + name VARCHAR(255), + ranking VARCHAR(32), + visible VARCHAR(5), + tags VARCHAR(255), + meta VARCHAR(2048), + PRIMARY KEY (objectid) +); diff --git a/openidm-repo-jdbc/src/test/resources/vendor/db2.sql b/openidm-repo-jdbc/src/test/resources/vendor/db2.sql new file mode 100644 index 0000000000..4cd7a11ae0 --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/vendor/db2.sql @@ -0,0 +1,46 @@ +CREATE SCHEMA wrenidm; + +CREATE BUFFERPOOL bpwrenidm32 PAGESIZE 32K; +CREATE TABLESPACE swrenidm_idx PAGESIZE 32K BUFFERPOOL bpwrenidm32; + +CREATE TABLE wrenidm.objecttypes ( + id INTEGER GENERATED BY DEFAULT AS IDENTITY (CYCLE), + objecttype VARCHAR(255) NOT NULL, + PRIMARY KEY (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id INTEGER GENERATED BY DEFAULT AS IDENTITY (CYCLE), + objecttypes_id INTEGER NOT NULL, + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + fullobject CLOB(2M), + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +); + +CREATE TABLE 
wrenidm.genericobjectproperties ( + genericobjects_id INTEGER NOT NULL, + propkey VARCHAR(255) NOT NULL, + proptype VARCHAR(32), + propvalue VARCHAR(2000), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE +) INDEX IN swrenidm_idx; +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_propkey ON wrenidm.genericobjectproperties (propkey); +CREATE INDEX idx_genericobjectproperties_propvalue ON wrenidm.genericobjectproperties (propvalue); + +CREATE TABLE wrenidm.managedgreeting ( + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + name VARCHAR(255), + ranking VARCHAR(32), + visible VARCHAR(5), + tags VARCHAR(255), + meta VARCHAR(2048), + PRIMARY KEY (objectid) +); diff --git a/openidm-repo-jdbc/src/test/resources/vendor/h2.sql b/openidm-repo-jdbc/src/test/resources/vendor/h2.sql new file mode 100644 index 0000000000..121759654b --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/vendor/h2.sql @@ -0,0 +1,42 @@ +CREATE SCHEMA wrenidm AUTHORIZATION wrenidm; + +CREATE TABLE wrenidm.objecttypes ( + id BIGSERIAL NOT NULL, + objecttype VARCHAR(255) NOT NULL, + PRIMARY KEY (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id BIGSERIAL NOT NULL, + objecttypes_id INTEGER NOT NULL, + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + fullobject JSON, + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE ON UPDATE NO ACTION, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +); + +CREATE TABLE wrenidm.genericobjectproperties ( + genericobjects_id BIGSERIAL NOT NULL, + propkey VARCHAR(255) NOT NULL, + proptype VARCHAR(32), + propvalue VARCHAR(65535), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE ON UPDATE NO ACTION +); +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_prop ON wrenidm.genericobjectproperties (propkey, propvalue); + +CREATE TABLE wrenidm.managedgreeting ( + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + name VARCHAR(255), + ranking VARCHAR(32), + visible VARCHAR(5), + tags VARCHAR(255), + meta VARCHAR(2048), + PRIMARY KEY (objectid) +); diff --git a/openidm-repo-jdbc/src/test/resources/vendor/mssql.sql b/openidm-repo-jdbc/src/test/resources/vendor/mssql.sql new file mode 100644 index 0000000000..4063180e64 --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/vendor/mssql.sql @@ -0,0 +1,55 @@ +CREATE DATABASE [wrenidm] COLLATE Latin1_General_100_CS_AS; + +USE [wrenidm]; + +CREATE LOGIN [wrenidm] WITH + PASSWORD = N'wrenidm', + CHECK_POLICY = OFF, + CHECK_EXPIRATION = OFF, + DEFAULT_DATABASE = [wrenidm]; + +CREATE USER [wrenidm] FOR LOGIN [wrenidm] WITH DEFAULT_SCHEMA = [wrenidm]; + +CREATE SCHEMA [wrenidm] AUTHORIZATION [wrenidm]; + +CREATE TABLE wrenidm.objecttypes ( + id NUMERIC(19,0) NOT NULL IDENTITY, + objecttype NVARCHAR(255) NOT NULL, + PRIMARY KEY CLUSTERED (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id NUMERIC(19,0) NOT NULL IDENTITY, + objecttypes_id NUMERIC(19,0) NOT NULL, + objectid NVARCHAR(255) 
NOT NULL, + rev NVARCHAR(38) NOT NULL, + fullobject NTEXT, + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +); + +CREATE TABLE wrenidm.genericobjectproperties ( + genericobjects_id NUMERIC(19,0) NOT NULL, + propkey NVARCHAR(255) NOT NULL, + proptype NVARCHAR(32), + propvalue NVARCHAR(2000), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE +); +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_propkey ON wrenidm.genericobjectproperties (propkey); +CREATE INDEX idx_genericobjectproperties_propvalue ON wrenidm.genericobjectproperties (propvalue); + +CREATE TABLE wrenidm.managedgreeting ( + objectid NVARCHAR(255) NOT NULL, + rev NVARCHAR(38) NOT NULL, + name NVARCHAR(255), + ranking NVARCHAR(32), + visible NVARCHAR(5), + tags NVARCHAR(255), + meta NVARCHAR(2048), + PRIMARY KEY (objectid) +); diff --git a/openidm-repo-jdbc/src/test/resources/vendor/mysql.sql b/openidm-repo-jdbc/src/test/resources/vendor/mysql.sql new file mode 100644 index 0000000000..8df22b9015 --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/vendor/mysql.sql @@ -0,0 +1,41 @@ +CREATE TABLE wrenidm.objecttypes ( + id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + objecttype VARCHAR(255) NOT NULL, + PRIMARY KEY (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + objecttypes_id BIGINT UNSIGNED NOT NULL, + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + fullobject MEDIUMTEXT, + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +) ENGINE = InnoDB; + +CREATE TABLE wrenidm.genericobjectproperties ( + genericobjects_id BIGINT UNSIGNED NOT NULL, + propkey VARCHAR(255) NOT NULL, + proptype VARCHAR(32), + propvalue VARCHAR(768), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE ON UPDATE NO ACTION +) ENGINE = InnoDB; +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_propkey ON wrenidm.genericobjectproperties (propkey); +CREATE INDEX idx_genericobjectproperties_propvalue ON wrenidm.genericobjectproperties (propvalue); + +CREATE TABLE wrenidm.managedgreeting ( + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + name VARCHAR(255), + ranking VARCHAR(32), + visible VARCHAR(5), + tags VARCHAR(255), + meta VARCHAR(2048), + PRIMARY KEY (objectid) +) ENGINE = InnoDB; diff --git a/openidm-repo-jdbc/src/test/resources/vendor/oracle.sql b/openidm-repo-jdbc/src/test/resources/vendor/oracle.sql new file mode 100644 index 0000000000..4ee16df049 --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/vendor/oracle.sql @@ -0,0 +1,40 @@ +CREATE TABLE wrenidm.objecttypes ( + id NUMBER(24,0) GENERATED BY DEFAULT ON NULL AS IDENTITY, + objecttype VARCHAR2(255 CHAR), + PRIMARY KEY (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id 
NUMBER(24,0) GENERATED BY DEFAULT ON NULL AS IDENTITY, + objecttypes_id NUMBER(24,0) NOT NULL, + objectid VARCHAR2(255 CHAR) NOT NULL, + rev VARCHAR2(38 CHAR) NOT NULL, + fullobject CLOB, + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +); + +CREATE TABLE wrenidm.genericobjectproperties ( + genericobjects_id NUMBER(24,0) NOT NULL, + propkey VARCHAR2(255 CHAR) NOT NULL, + proptype VARCHAR2(32 CHAR), + propvalue VARCHAR2(2000 CHAR), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE +); +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_prop ON wrenidm.genericobjectproperties (propkey, propvalue); + +CREATE TABLE wrenidm.managedgreeting ( + objectid VARCHAR2(255 CHAR) NOT NULL, + rev VARCHAR2(38 CHAR) NOT NULL, + name VARCHAR2(255), + ranking VARCHAR2(32 CHAR), + visible VARCHAR2(5 CHAR), + tags VARCHAR2(255), + meta VARCHAR2(2048), + PRIMARY KEY (objectid) +); diff --git a/openidm-repo-jdbc/src/test/resources/vendor/postgresql.sql b/openidm-repo-jdbc/src/test/resources/vendor/postgresql.sql new file mode 100644 index 0000000000..41f21ff578 --- /dev/null +++ b/openidm-repo-jdbc/src/test/resources/vendor/postgresql.sql @@ -0,0 +1,44 @@ +CREATE SCHEMA wrenidm; + +CREATE TABLE wrenidm.objecttypes ( + id BIGSERIAL NOT NULL, + objecttype VARCHAR(255) NOT NULL, + PRIMARY KEY (id), + CONSTRAINT idx_objecttypes_objecttype UNIQUE (objecttype) +); + +CREATE TABLE wrenidm.genericobjects ( + id BIGSERIAL NOT NULL, + objecttypes_id INTEGER NOT NULL, + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + fullobject JSON, + PRIMARY KEY (id), + CONSTRAINT fk_genericobjects_objecttypes FOREIGN KEY (objecttypes_id) + REFERENCES wrenidm.objecttypes (id) ON DELETE CASCADE ON UPDATE NO ACTION, + CONSTRAINT idx_genericobjects_object UNIQUE (objecttypes_id, objectid) +); + +-- XXX remove after refactor +CREATE TABLE wrenidm.genericobjectproperties ( + genericobjects_id BIGSERIAL NOT NULL, + propkey VARCHAR(255) NOT NULL, + proptype VARCHAR(32), + propvalue VARCHAR(65535), + CONSTRAINT fk_genericobjectproperties_genericobjects FOREIGN KEY (genericobjects_id) + REFERENCES wrenidm.genericobjects (id) ON DELETE CASCADE ON UPDATE NO ACTION +); +CREATE INDEX fk_genericobjectproperties_genericobjects ON wrenidm.genericobjectproperties (genericobjects_id); +CREATE INDEX idx_genericobjectproperties_prop ON wrenidm.genericobjectproperties (propkey, propvalue); + + +CREATE TABLE wrenidm.managedgreeting ( + objectid VARCHAR(255) NOT NULL, + rev VARCHAR(38) NOT NULL, + name VARCHAR(255), + ranking VARCHAR(32), + visible VARCHAR(5), + tags JSON, + meta JSON, + PRIMARY KEY (objectid) +); diff --git a/openidm-repo/src/main/java/org/forgerock/openidm/repo/util/TokenHandler.java b/openidm-repo/src/main/java/org/forgerock/openidm/repo/util/TokenHandler.java index ffc1edbaa2..305d0a61bd 100644 --- a/openidm-repo/src/main/java/org/forgerock/openidm/repo/util/TokenHandler.java +++ b/openidm-repo/src/main/java/org/forgerock/openidm/repo/util/TokenHandler.java @@ -2,6 +2,7 @@ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright © 2011 ForgeRock AS. All rights reserved. + * Portions Copyright 2023 Wren Security. 
* * The contents of this file are subject to the terms * of the Common Development and Distribution License @@ -29,117 +30,72 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.forgerock.json.resource.BadRequestException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +/** + * Utility class for resolving and replacing named parameters (substitution tokens) in query strings. + * + *

    + * Substitution tokens are in the format of ${token-name}. + */ public class TokenHandler { - final static Logger logger = LoggerFactory.getLogger(TokenHandler.class); - - // The OpenIDM query token is of format ${token-name} - Pattern tokenPattern = Pattern.compile("\\$\\{(.+?)\\}"); + private static final Pattern TOKEN_PATTERN = Pattern.compile("\\$\\{(.+?)\\}"); /** - * Replaces a query string with tokens of format ${token-name} with the values from the - * passed in map, where the token-name must be the key in the map + * Extracts all the token names in the query string of format ${token-name} * * @param queryString the query with tokens - * @param params the parameters to replace the tokens. Values can be String or List. - * @return the query with all tokens replace with their found values - * @throws BadRequestException if token in the query is not in the passed parameters + * @return the list of token names in the order they appear in the {@code queryString} */ - public String replaceTokensWithValues(String queryString, Map params) - throws BadRequestException { - java.util.regex.Matcher matcher = tokenPattern.matcher(queryString); - StringBuffer buffer = new StringBuffer(); + public List extractTokens(String queryString) { + List tokens = new ArrayList<>(); + Matcher matcher = TOKEN_PATTERN.matcher(queryString); while (matcher.find()) { - String tokenKey = matcher.group(1); - if (!params.containsKey(tokenKey)) { - // fail with an exception if token not found - throw new BadRequestException("Missing entry in params passed to query for token " + tokenKey); - } else { - Object replacement = params.get(tokenKey); - if (replacement instanceof List) { - StringBuffer commaSeparated = new StringBuffer(); - boolean first = true; - for (Object entry : ((List) replacement)) { - if (!first) { - commaSeparated.append(","); - } else { - first = false; - } - commaSeparated.append(entry.toString()); - } - replacement = commaSeparated.toString(); - } - if (replacement == null) { - replacement = ""; - } - matcher.appendReplacement(buffer, ""); - buffer.append(replacement); - } + tokens.add(matcher.group(1)); } - matcher.appendTail(buffer); - return buffer.toString(); - } - - /** - * Replaces a query string with tokens of format ${token-name} with the - * specified replacement string for all tokens. - * - * @param queryString the query with tokens to replace - * @param replacement the replacement string - * @return the query with all tokens replaced - */ - public String replaceTokens(String queryString, String replacement) { - return replaceTokens(queryString, replacement, new String[] {}); + return tokens; } /** - * Replaces a query string with tokens of format ${token-name} with the - * specified replacement string for all tokens. + * Replaces tokens of format ${token-name} in a query string with the specified + * replacement string for all tokens. * - * @param queryString the query with tokens to replace + * @param queryString the query string with tokens to replace * @param replacement the replacement string - * @param nonReplacementTokenPrefixes optional array of prefixes that, if found as part of a token, - * will not be replaced - * @return the query with all tokens replaced + * @param excludePrefixes optional array of prefixes that, if found as part of a token, will + * not be replaced + * @return the query string with all tokens replaced */ - public String replaceTokens(String queryString, String replacement, String... 
nonReplacementTokenPrefixes) { - Matcher matcher = tokenPattern.matcher(queryString); - StringBuffer buf = new StringBuffer(); + public String replaceTokens(String queryString, String replacement, String ... excludePrefixes) { + Matcher matcher = TOKEN_PATTERN.matcher(queryString); + StringBuffer result = new StringBuffer(); while (matcher.find()) { - String origToken = matcher.group(1); - //TODO: the size check seems invalid - if (origToken != null) { - // OrientDB token is of format :token-name - matcher.appendReplacement(buf, ""); - // if token has one of the "non-replacement" prefixes, leave it alone - if (tokenStartsWithPrefix(origToken, nonReplacementTokenPrefixes)) { - buf.append("${" + origToken + "}"); - } - else { - buf.append(replacement); + String tokenName = matcher.group(1); + if (tokenName != null) { + matcher.appendReplacement(result, ""); + if (hasTokenPrefix(tokenName, excludePrefixes)) { + result.append("${" + tokenName + "}"); + } else { + result.append(replacement); } } } - matcher.appendTail(buf); - return buf.toString(); + matcher.appendTail(result); + return result.toString(); } /** - * Returns whether the token starts with one of the prefixes passed. + * Test whether the given token string has one of the provided prefixes in the format + * prefix:name. * - * @param token the token to interrogate - * @param prefixes a list of prefixes - * @return whether the passed token starts with one of the prefixes + * @param token string token to test + * @param prefixes token prefixes to search for + * @return true if the token has one of the provided prefixes */ - private boolean tokenStartsWithPrefix(String token, String... prefixes) { + private boolean hasTokenPrefix(String token, String ... prefixes) { String[] tokenParts = token.split(":", 2); - if (tokenParts.length == 2) { + if (tokenParts.length > 1) { for (String prefix : prefixes) { - if (prefix.equals(tokenParts[0])) { + if (prefix.equals(tokenParts[0]) ) { return true; } } @@ -148,88 +104,75 @@ private boolean tokenStartsWithPrefix(String token, String... prefixes) { } /** - * Extracts all the token names in the query string of format ${token-name} - * - * @param queryString the query with tokens - * @return the list of token names, in the order they appear in the queryString - */ - public List extractTokens(String queryString) { - List tokens = new ArrayList(); - Matcher matcher = tokenPattern.matcher(queryString); - while (matcher.find()) { - String origToken = matcher.group(1); - tokens.add(origToken); - } - return tokens; - } - - /** - * Replaces some tokens in a query string with tokens of format ${token-name} + * Replace requested tokens in a query string with tokens of format ${token-name} * with the given replacements, which may again be tokens (e.g. in another format) * or values. Tokens that have no replacement defined stay in the original token format. * + *

    + * CAUTION: This method does not do any escaping or format checking and it is the + * responsibility of the caller to provide safe and sanitized replacement values. + * - * @param queryString the query with OpenIDM format tokens ${token} - * @param replacements the replacement values/tokens, where the key is the token name in the query string, - * and the value is the String to replace it with. + * @param queryString the query with tokens of format ${token-name} + * @param replacements the replacement strings, where the key is the token name in the query string, + * and the value is the string to replace it with * @return the query with any defined replacement values/tokens replaced, and the remaining tokens * left in the original format */ public String replaceSomeTokens(String queryString, Map replacements) { - Matcher matcher = tokenPattern.matcher(queryString); - StringBuffer buf = new StringBuffer(); + Matcher matcher = TOKEN_PATTERN.matcher(queryString); + StringBuffer result = new StringBuffer(); while (matcher.find()) { - String origToken = matcher.group(1); - if (origToken != null) { - String replacement = replacements.get(origToken); + String tokenName = matcher.group(1); + if (tokenName != null) { + String replacement = replacements.get(tokenName); if (replacement == null) { - // if not replacement specified, keep the original token. - replacement = "${" + origToken + "}"; + // if replacement not specified, keep the original token + replacement = "${" + tokenName + "}"; } - matcher.appendReplacement(buf, ""); - buf.append(replacement); + matcher.appendReplacement(result, ""); + result.append(replacement); } } - matcher.appendTail(buf); - return buf.toString(); + matcher.appendTail(result); + return result.toString(); } /** - * Replaces some tokens in a query string with tokens of format ${token-name} - * where token-name represents a list of values. The numberOfReplacements Map tells + * Replaces requested tokens in a query string with tokens of format ${token-name} + * where token-name represents a list of values. The {@code numberOfReplacements} map tells + * how many replacements to produce (comma-separated) for each token. The replacement * (for all tokens) is provided. Tokens that have no replacement defined stay in the * original token format. 
 * - * @param queryString the query with OpenIDM format tokens ${token} - * @param numberOfReplacements the number of replacements to replace a ${token} with - * @param replacement the replacement values/tokens - * @return the query with any defined replacement values/tokens replaced, and the remaining tokens + * @param queryString the query with tokens of format ${token-name} + * @param numberOfReplacements the number of replacements to replace a ${token-name} with + * @param replacement the replacement string that will be repeated as specified by the number of + * replacements + * @return the query with any defined replacement values replaced, and the remaining tokens * left in the original format */ public String replaceListTokens(String queryString, Map numberOfReplacements, String replacement) { - Matcher matcher = tokenPattern.matcher(queryString); - StringBuffer buf = new StringBuffer(); + Matcher matcher = TOKEN_PATTERN.matcher(queryString); + StringBuffer result = new StringBuffer(); while (matcher.find()) { - String origToken = matcher.group(1); - if (origToken != null) { - matcher.appendReplacement(buf, ""); - Integer length = numberOfReplacements.get(origToken); + String tokenName = matcher.group(1); + if (tokenName != null) { + matcher.appendReplacement(result, ""); + Integer length = numberOfReplacements.get(tokenName); if (length != null) { for (int i = 0; i < length; i++) { - buf.append(replacement); + result.append(replacement); if (i != length - 1) { - buf.append(", "); + result.append(", "); } } - } - else { - buf.append("${" + origToken + "}"); + } else { + result.append("${" + tokenName + "}"); } } } - matcher.appendTail(buf); - return buf.toString(); + matcher.appendTail(result); + return result.toString(); } } diff --git a/openidm-repo/src/test/java/org/forgerock/openidm/repo/util/TokenHandlerTest.java b/openidm-repo/src/test/java/org/forgerock/openidm/repo/util/TokenHandlerTest.java new file mode 100644 index 0000000000..78693633be --- /dev/null +++ b/openidm-repo/src/test/java/org/forgerock/openidm/repo/util/TokenHandlerTest.java @@ -0,0 +1,88 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.1.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.1.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2023 Wren Security. All rights reserved. + */ +package org.forgerock.openidm.repo.util; + +import static org.testng.Assert.assertEquals; + +import java.util.List; +import java.util.Map; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +/** + * {@link TokenHandler} test cases. 
+ */ +public class TokenHandlerTest { + + @DataProvider + public Object[][] extractTokensData() { + return new Object[][] { + { "HELLO ${WORLD}", List.of("WORLD") } + }; + } + + @Test(dataProvider = "extractTokensData") + public void testExtractTokens(String statement, List expected) { + TokenHandler tokenHandler = new TokenHandler(); + assertEquals(tokenHandler.extractTokens("HELLO ${WORLD}"), expected); + } + + @DataProvider + public Object[][] replaceTokensData() { + return new Object[][] { + { "HELLO ${WORLD}", new String[0], "HELLO ?" }, + { "FOO ${list:FOO} BAR ${BAR}", new String[] { "list" }, "FOO ${list:FOO} BAR ?" }, + { "${START} ${MIDDLE} ${FINISH}", new String[0], "? ? ?" }, + { "${START} MIDDLE ${FINISH}", new String[0], "? MIDDLE ?" }, + }; + } + + @Test(dataProvider = "replaceTokensData") + public void testReplaceTokens(String statement, String[] excludes, String expected) { + TokenHandler tokenHandler = new TokenHandler(); + assertEquals(tokenHandler.replaceTokens(statement, "?", excludes), expected); + } + + @DataProvider + public Object[][] replaceSomeTokensData() { + return new Object[][] { + { "HELLO ${WORLD}", Map.of("WORLD", "UNIVERSE"), "HELLO UNIVERSE" }, + { "HELLO ${WORLD}", Map.of("FOO", "BAR"), "HELLO ${WORLD}" } + }; + } + + @Test(dataProvider = "replaceSomeTokensData") + public void testReplaceSomeTokens(String statement, Map replacements, String expected) { + TokenHandler tokenHandler = new TokenHandler(); + assertEquals(tokenHandler.replaceSomeTokens(statement, replacements), expected); + } + + @DataProvider + public Object[][] replaceListTokensData() { + return new Object[][] { + { "foo IN (${bar})", Map.of("bar", 1), "foo IN (?)" }, + { "foo IN (${bar})", Map.of("bar", 2), "foo IN (?, ?)" }, + { "foo IN (${bar})", Map.of(), "foo IN (${bar})" } + }; + } + + @Test(dataProvider = "replaceListTokensData") + public void testReplaceListTokens(String statement, Map counts, String expected) { + TokenHandler tokenHandler = new TokenHandler(); + assertEquals(tokenHandler.replaceListTokens(statement, counts, "?"), expected); + } + +} diff --git a/openidm-system/src/main/java/org/forgerock/openidm/core/ServerConstants.java b/openidm-system/src/main/java/org/forgerock/openidm/core/ServerConstants.java index d9095fecc6..8e64aefd6e 100644 --- a/openidm-system/src/main/java/org/forgerock/openidm/core/ServerConstants.java +++ b/openidm-system/src/main/java/org/forgerock/openidm/core/ServerConstants.java @@ -265,7 +265,9 @@ public static String getDisplayVersion() { * Key available for token substitution inside the query expression, * identifying the resource queried by name. Example use: select * from * ${_resource} where ... + * @deprecated Use {@code QueryConstants#RESOURCE_NAME} instead */ + @Deprecated public final static String RESOURCE_NAME = "_resource"; public static final String LAUNCHER_INSTALL_LOCATION = "launcher.install.location"; diff --git a/openidm-util/src/main/java/org/forgerock/openidm/metadata/MetaDataProvider.java b/openidm-util/src/main/java/org/forgerock/openidm/metadata/MetaDataProvider.java index 7b86a97e1b..a27fc3f14b 100644 --- a/openidm-util/src/main/java/org/forgerock/openidm/metadata/MetaDataProvider.java +++ b/openidm-util/src/main/java/org/forgerock/openidm/metadata/MetaDataProvider.java @@ -30,10 +30,8 @@ import org.forgerock.json.JsonValue; /** - * Meta data provider interface to describe configuration requirements of a - * bundle. 
Use a meta-data.json file to declare a meta data provider for a - * bundle* - * + * Meta data provider interface to describe configuration requirements of a bundle. + * Use a meta-data.json file to declare a meta data provider for a bundle. */ public interface MetaDataProvider { /** diff --git a/pom.xml b/pom.xml index 3f63fc1e88..5f86ff1882 100644 --- a/pom.xml +++ b/pom.xml @@ -123,7 +123,7 @@ - 1.7.11 + 1.7.14 22.6.0
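
A minimal usage sketch of the named-parameter handling exercised by NamedParameterSqlTest and NamedParameterSupportTest above. The NamedParameterSql/NamedParameterSupport calls and their expected results are taken from those tests; the class name, package placement, and the surrounding JDBC wiring (prepareStatement/setObject) are illustrative assumptions rather than code from this change.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.List;
import java.util.Map;
// Assumed to live in the same package as the tests above.
import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSql;
import org.forgerock.openidm.repo.jdbc.impl.refactor.statement.NamedParameterSupport;

public class NamedParameterUsageSketch {

    // Parse the ${...} query once, then expand list tokens and bind values per request.
    static ResultSet query(Connection connection) throws Exception {
        var sql = NamedParameterSql.parse(
                "SELECT * FROM hello WHERE id = ${int:foo} AND val IN (${list:bar})");

        // List tokens are expanded only once the actual number of values is known.
        var prepared = NamedParameterSupport.prepareSqlString(sql, Map.of(
                "foo", 13,
                "bar", List.of("world", "universe")));

        // prepared.getSqlString() -> "SELECT * FROM hello WHERE id = ? AND val IN (?, ?)"
        PreparedStatement statement = connection.prepareStatement(prepared.getSqlString());

        // prepared.getParameters() -> [13, "world", "universe"]
        List<?> parameters = prepared.getParameters();
        for (int i = 0; i < parameters.size(); i++) {
            statement.setObject(i + 1, parameters.get(i)); // JDBC parameter indexes are 1-based
        }
        return statement.executeQuery();
    }
}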