diff --git a/pom.xml b/pom.xml index a577fa59..887d3721 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-r2dbc - 1.0.0.BUILD-SNAPSHOT + 1.0.0.gh-64-SNAPSHOT Spring Data R2DBC Spring Data module for R2DBC. diff --git a/src/main/asciidoc/reference/r2dbc-core.adoc b/src/main/asciidoc/reference/r2dbc-core.adoc new file mode 100644 index 00000000..87466e3e --- /dev/null +++ b/src/main/asciidoc/reference/r2dbc-core.adoc @@ -0,0 +1,241 @@ +The R2DBC support contains a wide range of features: + +* Spring configuration support with Java-based `@Configuration` classes for an R2DBC driver instance. +* `DatabaseClient` helper class that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs. +* Exception translation into Spring's portable Data Access Exception hierarchy. +* Feature-rich Object Mapping integrated with Spring's Conversion Service. +* Annotation-based mapping metadata that is extensible to support other metadata formats. +* Automatic implementation of Repository interfaces, including support for custom query methods. + +For most tasks, you should use `DatabaseClient` or the Repository support, which both leverage the rich mapping functionality. +`DatabaseClient` is the place to look for accessing functionality such as ad-hoc CRUD operations. + +[[r2dbc.getting-started]] +== Getting Started + +An easy way to bootstrap setting up a working environment is to create a Spring-based project through https://start.spring.io[start.spring.io]. + +. Add the following to the pom.xml files `dependencies` element: ++ +[source,xml,subs="+attributes"] +---- + + + + io.r2dbc + r2dbc-bom + ${r2dbc-releasetrain.version} + pom + import + + + + + + + + + + org.springframework.data + spring-data-r2dbc + {version} + + + + + io.r2dbc + r2dbc-h2 + {r2dbcVersion} + + + +---- +. 
Change the version of Spring in the pom.xml to be
+
+[source,xml,subs="+attributes"]
+----
+<spring.framework.version>{springVersion}</spring.framework.version>
+----
+. Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level of your `<dependencies/>` element:
+
+[source,xml]
+----
+<repositories>
+  <repository>
+    <id>spring-milestone</id>
+    <name>Spring Maven MILESTONE Repository</name>
+    <url>https://repo.spring.io/libs-milestone</url>
+  </repository>
+</repositories>
+----
+
+The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here].
+
+You may also want to set the logging level to `DEBUG` to see some additional information. To do so, edit the `application.properties` file to have the following content:
+
+[source]
+----
+logging.level.org.springframework.data.r2dbc=DEBUG
+----
+
+Then you can create a `Person` class to persist:
+
+[source,java]
+----
+package org.spring.r2dbc.example;
+
+public class Person {
+
+  private String id;
+  private String name;
+  private int age;
+
+  public Person(String id, String name, int age) {
+    this.id = id;
+    this.name = name;
+    this.age = age;
+  }
+
+  public String getId() {
+    return id;
+  }
+  public String getName() {
+    return name;
+  }
+  public int getAge() {
+    return age;
+  }
+
+  @Override
+  public String toString() {
+    return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
+  }
+}
+----
+
+Next, you need to create a table structure in your database:
+
+[source,sql]
+----
+CREATE TABLE person
+  (id VARCHAR(255) PRIMARY KEY,
+   name VARCHAR(255),
+   age INT);
+----
+
+You also need a main application to run:
+
+[source,java]
+----
+package org.spring.r2dbc.example;
+
+public class R2dbcApp {
+
+  private static final Log log = LogFactory.getLog(R2dbcApp.class);
+
+  public static void main(String[] args) throws Exception {
+
+    ConnectionFactory connectionFactory = ConnectionFactories.get("r2dbc:h2:mem:///test?options=DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE");
+
+    DatabaseClient client = DatabaseClient.create(connectionFactory);
+
+    client.execute()
+      .sql("CREATE TABLE person" +
+          "(id VARCHAR(255) PRIMARY KEY," +
+          "name VARCHAR(255)," +
+          "age INT)")
+      .fetch()
+      .rowsUpdated()
+      .as(StepVerifier::create)
+      .expectNextCount(1)
+      .verifyComplete();
+
+    client.insert()
+      .into(Person.class)
+      .using(new Person("joe", "Joe", 34))
+      .then()
+      .as(StepVerifier::create)
+      .verifyComplete();
+
+    client.select()
+      .from(Person.class)
+      .fetch()
+      .first()
+      .doOnNext(it -> log.info(it))
+      .as(StepVerifier::create)
+      .expectNextCount(1)
+      .verifyComplete();
+  }
+}
+----
+
+When you run the main program, the preceding examples produce output similar to the following:
+
+[source]
+----
+2018-11-28 10:47:03,893 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person
+    (id VARCHAR(255) PRIMARY KEY,
+    name VARCHAR(255),
+    age INT)]
+2018-11-28 10:47:04,074 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)]
+2018-11-28 10:47:04,092 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person]
+2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34]
+----
+
+Even in this simple example, there are a few things to notice:
+
+* You can create an instance of the central helper class in Spring Data R2DBC, <<r2dbc.datbaseclient,`DatabaseClient`>>, by using a standard `io.r2dbc.spi.ConnectionFactory` object.
+* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.).
+* Mapping conventions can use field access. Notice that the `Person` class has only getters.
+* If the constructor argument names match the column names of the stored row, they are used to instantiate the object.
+ +[[r2dbc.examples-repo]] +== Examples Repository + +There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works. + +[[r2dbc.drivers]] +== Connecting to a Relational Database with Spring + +One of the first tasks when using relational databases and Spring is to create a `io.r2dbc.spi.ConnectionFactory` object using the IoC container. The following example explains Java-based configuration. + +[[r2dbc.connectionfactory]] +=== Registering a `ConnectionFactory` Instance using Java-based Metadata + +The following example shows an example of using Java-based bean metadata to register an instance of a `io.r2dbc.spi.ConnectionFactory`: + +.Registering a `io.r2dbc.spi.ConnectionFactory` object using Java-based bean metadata +==== +[source,java] +---- +@Configuration +public class ApplicationConfiguration extends AbstractR2dbcConfiguration { + + @Override + @Bean + public ConnectionFactory connectionFactory() { + return …; + } +} +---- +==== + +This approach lets you use the standard `io.r2dbc.spi.ConnectionFactory` instance, with the container using Spring's `AbstractR2dbcConfiguration`. As compared to registering a `ConnectionFactory` instance directly, the configuration support has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates R2DBC exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and the use of `@Repository` is described in https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html[Spring's DAO support features]. + +`AbstractR2dbcConfiguration` registers also `DatabaseClient` that is required for database interaction and for Repository implementation. 
+
+[[r2dbc.drivers]]
+=== R2DBC Drivers
+
+Spring Data R2DBC supports drivers by R2DBC's pluggable SPI mechanism. Any driver implementing the R2DBC spec can be used with Spring Data R2DBC.
+R2DBC is a relatively young initiative that gains significance by maturing through adoption.
+As of writing the following drivers are available:
+
+* https://github.com/r2dbc/r2dbc-postgresql[Postgres] (`io.r2dbc:r2dbc-postgresql`)
+* https://github.com/r2dbc/r2dbc-h2[H2] (`io.r2dbc:r2dbc-h2`)
+* https://github.com/r2dbc/r2dbc-mssql[Microsoft SQL Server] (`io.r2dbc:r2dbc-mssql`)
+* https://github.com/jasync-sql/jasync-sql[MySQL] (`com.github.jasync-sql:jasync-r2dbc-mysql`)
+
+Spring Data R2DBC reacts to database specifics by inspecting `ConnectionFactoryMetadata` and selects the appropriate database dialect.
+You can configure your own `Dialect` if the used driver is not yet known to Spring Data R2DBC.
+
diff --git a/src/main/asciidoc/reference/r2dbc-databaseclient.adoc b/src/main/asciidoc/reference/r2dbc-databaseclient.adoc
new file mode 100644
index 00000000..5cbb8652
--- /dev/null
+++ b/src/main/asciidoc/reference/r2dbc-databaseclient.adoc
@@ -0,0 +1,111 @@
+[[r2dbc.datbaseclient]]
+= Introduction to `DatabaseClient`
+
+Spring Data R2DBC includes a reactive, non-blocking `DatabaseClient` for database interaction. The client has a functional, fluent API with reactive types for declarative composition.
+`DatabaseClient` encapsulates resource handling such as opening and closing connections so your application code can make use of executing SQL queries or calling higher-level functionality such as inserting or selecting data.
+
+NOTE: `DatabaseClient` is a young application component providing a minimal set of convenience methods that is likely to be extended through time.
+
+NOTE: Once configured, `DatabaseClient` is thread-safe and can be reused across multiple instances.
+ +Another central feature of `DatabaseClient` is translation of exceptions thrown by R2DBC drivers into Spring's portable Data Access Exception hierarchy. See "`<>`" for more information. + +The next section contains an example of how to work with the `DatabaseClient` in the context of the Spring container. + +[[r2dbc.datbaseclient.create]] +== Creating `DatabaseClient` + +The simplest way to create a `DatabaseClient` is through a static factory method: + +[source,java] +---- +DatabaseClient.create(ConnectionFactory connectionFactory) +---- + +The above method creates a `DatabaseClient` with default settings. + +You can also use `DatabaseClient.builder()` with further options to customize the client: + +* `exceptionTranslator`: Supply a specific `R2dbcExceptionTranslator` to customize how R2DBC exceptions are translated into Spring's portable Data Access Exception hierarchy. See "`<>`" for more information. +* `dataAccessStrategy`: Strategy how SQL queries are generated and how objects are mapped. + +Once built, a `DatabaseClient` instance is immutable. However, you can clone it and build a modified copy without affecting the original instance, as the following example shows: + +[source,java] +---- +DatabaseClient client1 = DatabaseClient.builder() + .exceptionTranslator(exceptionTranslatorA).build(); + +DatabaseClient client2 = client1.mutate() + .exceptionTranslator(exceptionTranslatorB).build(); +---- + +== Controlling Database Connections + +Spring Data R2DBC obtains a connection to the database through a `ConnectionFactory`. +A `ConnectionFactory` is part of the R2DBC specification and is a generalized connection factory. +It lets a container or a framework hide connection pooling and transaction management issues from the application code. + +When you use Spring Data R2DBC, you can create a `ConnectionFactory` using your R2DBC driver. 
+`ConnectionFactory` implementations can either return the same connection, different connections or provide connection pooling. +`DatabaseClient` uses `ConnectionFactory` to create and release connections per operation without affinity to a particular connection across multiple operations. + +[[r2dbc.exception]] += Exception Translation + +The Spring framework provides exception translation for a wide variety of database and mapping technologies. +The Spring support for R2DBC extends this feature by providing implementations of the `R2dbcExceptionTranslator` interface. + +`R2dbcExceptionTranslator` is an interface to be implemented by classes that can translate between `R2dbcException` and Spring’s own `org.springframework.dao.DataAccessException`, which is agnostic in regard to data access strategy. +Implementations can be generic (for example, using SQLState codes) or proprietary (for example, using Postgres error codes) for greater precision. + +`R2dbcExceptionSubclassTranslator` is the implementation of `R2dbcExceptionTranslator` that is used by default. +It considers R2DBC's categorized exception hierarchy to translate these into Spring's consistent exception hierarchy. +`R2dbcExceptionSubclassTranslator` uses `SqlStateR2dbcExceptionTranslator` as fallback if it is not able to translate an exception. + +`SqlErrorCodeR2dbcExceptionTranslator` uses specific vendor codes using Spring JDBC's `SQLErrorCodes`. +It is more precise than the SQLState implementation. +The error code translations are based on codes held in a JavaBean type class called `SQLErrorCodes`. +This class is created and populated by an `SQLErrorCodesFactory`, which (as the name suggests) is a factory for creating SQLErrorCodes based on the contents of a configuration file named `sql-error-codes.xml` from Spring's Data Access module. +This file is populated with vendor codes and based on the `ConnectionFactoryName` taken from `ConnectionFactoryMetadata`. 
+The codes for the actual database you are using are used.
+
+The `SqlErrorCodeR2dbcExceptionTranslator` applies matching rules in the following sequence:
+
+1. Any custom translation implemented by a subclass. Normally, the provided concrete `SqlErrorCodeR2dbcExceptionTranslator` is used, so this rule does not apply. It applies only if you have actually provided a subclass implementation.
+2. Any custom implementation of the `SQLExceptionTranslator` interface that is provided as the `customSqlExceptionTranslator` property of the `SQLErrorCodes` class.
+3. Error code matching is applied.
+4. Use a fallback translator.
+
+NOTE: The `SQLErrorCodesFactory` is used by default to define Error codes and custom exception translations. They are looked up in a file named `sql-error-codes.xml` from the classpath, and the matching `SQLErrorCodes` instance is located based on the database name from the database metadata of the database in use. `SQLErrorCodesFactory` requires Spring JDBC.
+
+You can extend `SqlErrorCodeR2dbcExceptionTranslator`, as the following example shows:
+
+[source,java]
+----
+public class CustomSqlErrorCodeR2dbcExceptionTranslator extends SqlErrorCodeR2dbcExceptionTranslator {
+
+  protected DataAccessException customTranslate(String task, String sql, R2dbcException r2dbcex) {
+    if (r2dbcex.getErrorCode() == -12345) {
+      return new DeadlockLoserDataAccessException(task, r2dbcex);
+    }
+    return null;
+  }
+}
+----
+
+In the preceding example, the specific error code (`-12345`) is translated, while other errors are left to be translated by the default translator implementation.
+To use this custom translator, you must configure `DatabaseClient` through the builder method `exceptionTranslator`, and you must use this `DatabaseClient` for all of the data access processing where this translator is needed.
+The following example shows how you can use this custom translator: + +[source,java] +---- +ConnectionFactory connectionFactory = …; + +CustomSqlErrorCodeR2dbcExceptionTranslator exceptionTranslator = new CustomSqlErrorCodeR2dbcExceptionTranslator(); + +DatabaseClient client = DatabaseClient.builder() + .connectionFactory(connectionFactory) + .exceptionTranslator(exceptionTranslator) + .build(); +---- diff --git a/src/main/asciidoc/reference/r2dbc-fluent.adoc b/src/main/asciidoc/reference/r2dbc-fluent.adoc new file mode 100644 index 00000000..7ac0eadd --- /dev/null +++ b/src/main/asciidoc/reference/r2dbc-fluent.adoc @@ -0,0 +1,239 @@ +[[r2dbc.datbaseclient.fluent-api]] += Fluent Data Access API + +You have already seen ``DatabaseClient``s SQL API that offers you maximum flexibility to execute any type of SQL. +`DatabaseClient` provides a more narrow interface for typical ad-hoc use-cases such as querying, inserting, updating, and deleting data. + +The entry points (`insert()`, `select()`, `update()`, and others) follow a natural naming schema based on the operation to be run. Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that creates and runs a SQL statement. Spring Data R2DBC uses a `Dialect` abstraction to determine bind markers, pagination support and data types natively supported by the underlying driver. + +Let's take a look at a simple query: + +==== +[source,java] +---- +Flux people = databaseClient.select() + .from(Person.class) <1> + .fetch() + .all(); <2> +---- +<1> Using `Person` with the `from(…)` method sets the `FROM` table based on mapping metadata. It also maps tabular results on `Person` result objects. +<2> Fetching `all()` rows returns a `Flux` without limiting results. 
+====
+
+The following example declares a more complex query that specifies the table name by name, a `WHERE` condition and `ORDER BY` clause:
+
+====
+[source,java]
+----
+Mono<Person> first = databaseClient.select()
+  .from("legoset") <1>
+  .matching(where("firstname").is("John") <2>
+    .and("lastname").in("Doe", "White"))
+  .orderBy(desc("id")) <3>
+  .as(Person.class)
+  .fetch()
+  .one(); <4>
+----
+<1> Selecting from a table by name returns row results as `Map` with case-insensitive column name matching.
+<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter results.
+<3> Results can be ordered by individual column names resulting in an `ORDER BY` clause.
+<4> Selecting the one result fetches just a single row. This way of consuming rows expects the query to return exactly a single result. `Mono` emits an `IncorrectResultSizeDataAccessException` if the query yields more than a single result.
+====
+
+You can consume Query results in three ways:
+
+* Through object mapping (e.g. `as(Class)`) using Spring Data's mapping-metadata.
+* As `Map` where column names are mapped to their value. Column names are looked up case-insensitive.
+* By supplying a mapping `BiFunction` for direct access to R2DBC `Row` and `RowMetadata`
+
+You can switch between retrieving a single entity and retrieving multiple entities through the terminating methods:
+
+* `first()`: Consume only the first row returning a `Mono`. The returned `Mono` completes without emitting an object if the query returns no results.
+* `one()`: Consume exactly one row returning a `Mono`. The returned `Mono` completes without emitting an object if the query returns no results. If the query returns more than one row then `Mono` completes exceptionally emitting `IncorrectResultSizeDataAccessException`.
+* `all()`: Consume all returned rows returning a `Flux`.
+* `rowsUpdated()`: Consume the number of affected rows. Typically used with `INSERT`/`UPDATE`/`DELETE` statements.
+
+[[r2dbc.datbaseclient.fluent-api.select]]
+== Selecting Data
+
+Use the `select()` entry point to express your `SELECT` queries.
+The resulting `SELECT` queries support the commonly used clauses `WHERE`, `ORDER BY` and support pagination.
+The fluent API style allows you to chain together multiple methods while having easy-to-understand code.
+To improve readability, use static imports that allow you to avoid using the 'new' keyword for creating `Criteria` instances.
+
+[[r2dbc.datbaseclient.fluent-api.criteria]]
+==== Methods for the Criteria Class
+
+The `Criteria` class provides the following methods, all of which correspond to SQL operators:
+
+* `Criteria` *and* `(String column)` Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
+* `Criteria` *or* `(String column)` Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
+* `Criteria` *greaterThan* `(Object o)` Creates a criterion using the `>` operator.
+* `Criteria` *greaterThanOrEquals* `(Object o)` Creates a criterion using the `>=` operator.
+* `Criteria` *in* `(Object... o)` Creates a criterion using the `IN` operator for a varargs argument.
+* `Criteria` *in* `(Collection collection)` Creates a criterion using the `IN` operator using a collection.
+* `Criteria` *is* `(Object o)` Creates a criterion using column matching (`property = value`).
+* `Criteria` *isNull* `()` Creates a criterion using the `IS NULL` operator.
+* `Criteria` *isNotNull* `()` Creates a criterion using the `IS NOT NULL` operator.
+* `Criteria` *lessThan* `(Object o)` Creates a criterion using the `<` operator.
+* `Criteria` *lessThanOrEquals* `(Object o)` Creates a criterion using the `<=` operator.
+* `Criteria` *like* `(Object o)` Creates a criterion using the `LIKE` operator without escape character processing.
+* `Criteria` *not* `(Object o)` Creates a criterion using the `!=` operator.
+* `Criteria` *notIn* `(Object... o)` Creates a criterion using the `NOT IN` operator for a varargs argument.
+* `Criteria` *notIn* `(Collection collection)` Creates a criterion using the `NOT IN` operator using a collection.
+
+You can use `Criteria` with `SELECT`, `UPDATE`, and `DELETE` queries.
+
+[[r2dbc.datbaseclient.fluent-api.select.methods]]
+==== Methods for SELECT operations
+
+The `select()` entry point exposes some additional methods that provide options for the query:
+
+* *from* `(Class)` used to specify the source table using a mapped object. Returns results by default as `T`.
+* *from* `(String)` used to specify the source table name. Returns results by default as `Map`.
+* *as* `(Class)` used to map results to `T`.
+* *map* `(BiFunction)` used to supply a mapping function to extract results.
+* *project* `(String... columns)` used to specify which columns to return.
+* *matching* `(Criteria)` used to declare a `WHERE` condition to filter results.
+* *orderBy* `(Order)` used to declare an `ORDER BY` clause to sort results.
+* *page* `(Page pageable)` used to retrieve a particular page within the result. Limits the size of the returned results and reads from an offset.
+* *fetch* `()` transition call declaration to the fetch stage to declare result consumption multiplicity.
+
+[[r2dbc.datbaseclient.fluent-api.insert]]
+== Inserting Data
+
+Use the `insert()` entry point to insert data. Similar to `select()`, `insert()` allows free-form and mapped object inserts.
+
+Take a look at a simple typed insert operation:
+
+====
+[source,java]
+----
+Mono<Void> insert = databaseClient.insert()
+  .into(Person.class) <1>
+  .using(new Person(…)) <2>
+  .then(); <3>
+----
+<1> Using `Person` with the `into(…)` method sets the `INTO` table based on mapping metadata. It also prepares the insert statement to accept `Person` objects for inserting.
+<2> Provide a scalar `Person` object. Alternatively, you can supply a `Publisher` to execute a stream of `INSERT` statements. This method extracts all non-``null`` values and inserts these.
+<3> Use `then()` to just insert an object without consuming further details. Modifying statements allow consumption of the number of affected rows or tabular results for consuming generated keys.
+====
+
+Inserts also support untyped operations:
+
+====
+[source,java]
+----
+Mono<Void> insert = databaseClient.insert()
+  .into("person") <1>
+  .value("firstname", "John") <2>
+  .nullValue("lastname") <3>
+  .then(); <4>
+----
+<1> Start an insert into the `person` table.
+<2> Provide a non-null value for `firstname`.
+<3> Set `lastname` to `null`.
+<4> Use `then()` to just insert an object without consuming further details. Modifying statements allow consumption of the number of affected rows or tabular results for consuming generated keys.
+====
+
+[[r2dbc.datbaseclient.fluent-api.insert.methods]]
+==== Methods for INSERT operations
+
+The `insert()` entry point exposes some additional methods that provide options for the operation:
+
+* *into* `(Class)` used to specify the target table using a mapped object. Returns results by default as `T`.
+* *into* `(String)` used to specify the target table name. Returns results by default as `Map`.
+* *using* `(T)` used to specify the object to insert.
+* *using* `(Publisher)` used to accept a stream of objects to insert.
+* *table* `(String)` used to override the target table name.
+* *value* `(String, Object)` used to provide a column value to insert.
+* *nullValue* `(String)` used to provide a null value to insert.
+* *map* `(BiFunction)` used to supply a mapping function to extract results.
+* *then* `()` execute `INSERT` without consuming any results.
+* *fetch* `()` transition call declaration to the fetch stage to declare result consumption multiplicity.
+
+[[r2dbc.datbaseclient.fluent-api.update]]
+== Updating Data
+
+Use the `update()` entry point to update rows.
+Updating data starts with a specification of the table to update accepting `Update` specifying assignments. It also accepts `Criteria` to create a `WHERE` clause.
+
+Take a look at a simple typed update operation:
+
+====
+[source,java]
+----
+Person modified = …
+
+Mono<Void> update = databaseClient.update()
+  .table(Person.class) <1>
+  .using(modified) <2>
+  .then(); <3>
+----
+<1> Using `Person` with the `table(…)` method sets the table to update based on mapping metadata.
+<2> Provide a scalar `Person` object value. `using(…)` accepts the modified object and derives primary keys and updates all column values.
+<3> Use `then()` to just update rows without consuming further details. Modifying statements allow also consumption of the number of affected rows.
+====
+
+Updates also support untyped operations:
+
+====
+[source,java]
+----
+Mono<Void> update = databaseClient.update()
+  .table("person") <1>
+  .using(Update.update("firstname", "Jane")) <2>
+  .matching(where("firstname").is("John")) <3>
+  .then(); <4>
+----
+<1> Update table `person`.
+<2> Provide an `Update` definition of which columns to update.
+<3> The issued query declares a `WHERE` condition on `firstname` columns to filter rows to update.
+<4> Use `then()` to just update rows without consuming further details. Modifying statements allow also consumption of the number of affected rows.
+====
+
+[[r2dbc.datbaseclient.fluent-api.update.methods]]
+==== Methods for UPDATE operations
+
+The `update()` entry point exposes some additional methods that provide options for the operation:
+
+* *table* `(Class)` used to specify the target table using a mapped object. Returns results by default as `T`.
+* *table* `(String)` used to specify the target table name. Returns results by default as `Map`.
+* *using* `(T)` used to specify the object to update. Derives criteria itself.
+* *using* `(Update)` used to specify the update definition.
+* *matching* `(Criteria)` used to declare a `WHERE` condition to rows to update.
+* *then* `()` execute `UPDATE` without consuming any results.
+* *fetch* `()` transition call declaration to the fetch stage to fetch the number of updated rows.
+
+[[r2dbc.datbaseclient.fluent-api.delete]]
+== Deleting Data
+
+Use the `delete()` entry point to delete rows.
+Removing data starts with a specification of the table to delete from and optionally accepts a `Criteria` to create a `WHERE` clause.
+
+Take a look at a simple delete operation:
+
+====
+[source,java]
+----
+Mono<Void> delete = databaseClient.delete()
+  .from(Person.class) <1>
+  .matching(where("firstname").is("John") <2>
+    .and("lastname").in("Doe", "White"))
+  .then(); <3>
+----
+<1> Using `Person` with the `from(…)` method sets the `FROM` table based on mapping metadata.
+<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter rows to delete.
+<3> Use `then()` to just delete rows without consuming further details. Modifying statements allow also consumption of the number of affected rows.
+====
+
+[[r2dbc.datbaseclient.fluent-api.delete.methods]]
+==== Methods for DELETE operations
+
+The `delete()` entry point exposes some additional methods that provide options for the operation:
+
+* *from* `(Class)` used to specify the target table using a mapped object. Returns results by default as `T`.
+* *from* `(String)` used to specify the target table name. Returns results by default as `Map`.
+* *matching* `(Criteria)` used to declare a `WHERE` condition to rows to delete.
+* *then* `()` execute `DELETE` without consuming any results.
+* *fetch* `()` transition call declaration to the fetch stage to fetch the number of deleted rows.
diff --git a/src/main/asciidoc/reference/r2dbc-sql.adoc b/src/main/asciidoc/reference/r2dbc-sql.adoc new file mode 100644 index 00000000..eb824d08 --- /dev/null +++ b/src/main/asciidoc/reference/r2dbc-sql.adoc @@ -0,0 +1,163 @@ +[[r2dbc.datbaseclient.statements]] += Executing Statements + +Running a statement is the basic functionality that is covered by `DatabaseClient`. +The following example shows what you need to include for minimal but fully functional code that creates a new table: + +[source,java] +---- +Mono completion = client.execute() + .sql("CREATE TABLE person (id VARCHAR(255) PRIMARY KEY, name VARCHAR(255), age INTEGER);") + .then(); +---- + +`DatabaseClient` is designed for a convenient fluent usage. +It exposes intermediate, continuation, and terminal methods at each stage of the execution specification. +The example above uses `then()` to return a completion `Publisher` that completes as soon as the query (or queries, if the SQL query contains multiple statements) completes. + +NOTE: `execute().sql(…)` accepts either the SQL query string or a query `Supplier` to defer the actual query creation until execution. + +[[r2dbc.datbaseclient.queries]] +== Running Queries + +SQL queries can return values or the number of affected rows. +`DatabaseClient` can return the number of updated rows or the rows themselves, depending on the issued query. + +The following example shows an `UPDATE` statement that returns the number of updated rows: + +[source,java] +---- +Mono affectedRows = client.execute() + .sql("UPDATE person SET name = 'Joe'") + .fetch().rowsUpdated(); +---- + +Running a `SELECT` query returns a different type of result, in particular tabular results. Tabular data is typically consumed by streaming each `Row`. +You might have noticed the use of `fetch()` in the previous example. +`fetch()` is a continuation operator that allows you to specify how much data you want to consume. 
+
+[source,java]
+----
+Mono<Map<String, Object>> first = client.execute()
+  .sql("SELECT id, name FROM person")
+  .fetch().first();
+----
+
+Calling `first()` returns the first row from the result and discards remaining rows.
+You can consume data with the following operators:
+
+* `first()` returns the first row of the entire result
+* `one()` returns exactly one result and fails if the result contains more rows.
+* `all()` returns all rows of the result
+* `rowsUpdated()` returns the number of affected rows (`INSERT` count, `UPDATE` count)
+
+`DatabaseClient` queries return their results by default as `Map` of column name to value. You can customize type mapping by applying an `as(Class)` operator.
+
+[source,java]
+----
+Flux<Person> all = client.execute()
+  .sql("SELECT id, name FROM mytable")
+  .as(Person.class)
+  .fetch().all();
+----
+
+`as(…)` applies <> and maps the resulting columns to your POJO.
+
+[[r2dbc.datbaseclient.mapping]]
+== Mapping Results
+
+You can customize result extraction beyond `Map` and POJO result extraction by providing an extractor `BiFunction`.
+The extractor function interacts directly with R2DBC's `Row` and `RowMetadata` objects and can return arbitrary values (singular values, collections/maps, objects).
+
+The following example extracts the `name` column and emits its value:
+
+[source,java]
+----
+Flux<String> names = client.execute()
+  .sql("SELECT name FROM person")
+  .map((row, rowMetadata) -> row.get("name", String.class))
+  .all();
+----
+
+[[r2dbc.datbaseclient.mapping.null]]
+.What about `null`?
+****
+Relational database results may contain `null` values.
+Reactive Streams forbids emission of `null` values which requires a proper `null` handling in the extractor function.
+While you can obtain `null` values from a `Row`, you must not emit a `null` value.
+You must wrap any `null` values in an object (e.g. `Optional` for singular values) to make sure a `null` value is never returned directly by your extractor function.
+**** + +[[r2dbc.datbaseclient.binding]] +== Binding Values to Queries + +A typical application requires parameterized SQL statements to select or update rows according to some input. +These are typically `SELECT` statements constrained by a `WHERE` clause or `INSERT`/`UPDATE` statements accepting input parameters. +Parameterized statements bear the risk of SQL injection if parameters are not escaped properly. +`DatabaseClient` leverages R2DBC's Bind API to eliminate the risk of SQL injection for query parameters. +You can provide a parameterized SQL statement with the `sql(…)` operator and bind parameters to the actual `Statement`. +Your R2DBC driver then executes the statement using prepared statements and parameter substitution. + +Parameter binding supports various binding strategies: + +* By Index using zero-based parameter indexes. +* By Name using the placeholder name. + +The following example shows parameter binding for a query: + +[source,java] +---- +db.execute() + .sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)") + .bind("id", "joe") + .bind("name", "Joe") + .bind("age", 34); +---- + +.R2DBC Native Bind Markers +**** +R2DBC uses database-native bind markers that depend on the actual database vendor. +As an example, Postgres uses indexed markers such as `$1`, `$2`, `$n`. +Another example is SQL Server that uses named bind markers prefixed with `@` (at). + +This is different from JDBC which requires `?` (question mark) as bind markers. +In JDBC, the actual drivers translate question mark bind markers to database-native markers as part of their statement execution. + +Spring Data R2DBC allows you to use native bind markers or named bind markers with the `:name` syntax. + +Named parameter support leverages ``Dialect``s to expand named parameters to native bind markers at the time of query execution which gives you a certain degree of query portability across various database vendors. 
+**** + +The query-preprocessor unrolls named `Collection` parameters into a series of bind markers to remove the need for dynamic query creation based on the number of arguments. +Nested object arrays are expanded to allow usage of e.g. select lists. + +Consider the following query: + +[source,sql] +---- +SELECT id, name, state FROM table WHERE (name, age) IN (('John', 35), ('Ann', 50)) +---- + +This query can be parametrized and executed as: + +[source,java] +---- +List<Object[]> tuples = new ArrayList<>(); +tuples.add(new Object[] {"John", 35}); +tuples.add(new Object[] {"Ann", 50}); + +db.execute() + .sql("SELECT id, name, state FROM table WHERE (name, age) IN (:tuples)") + .bind("tuples", tuples); +---- + +NOTE: Usage of select lists is vendor-dependent. + +A simpler variant using `IN` predicates: + +[source,java] +---- +db.execute() + .sql("SELECT id, name, state FROM table WHERE age IN (:ages)") + .bind("ages", Arrays.asList(35, 50)); +---- diff --git a/src/main/asciidoc/reference/r2dbc-transactions.adoc b/src/main/asciidoc/reference/r2dbc-transactions.adoc new file mode 100644 index 00000000..f85db46f --- /dev/null +++ b/src/main/asciidoc/reference/r2dbc-transactions.adoc @@ -0,0 +1,29 @@ +[[r2dbc.datbaseclient.transactions]] += Transactions + +A common pattern when using relational databases is grouping multiple queries within a unit of work that is guarded by a transaction. +Relational databases typically associate a transaction with a single transport connection. +Using different connections hence results in utilizing different transactions. +Spring Data R2DBC includes a transactional `DatabaseClient` implementation with `TransactionalDatabaseClient` that allows you to group multiple statements within the same transaction. +`TransactionalDatabaseClient` is an extension of `DatabaseClient` that exposes the same functionality as `DatabaseClient` and adds transaction-management methods. 
+ +You can run multiple statements within a transaction using the `inTransaction(Function)` closure: + +[source,java] +---- +TransactionalDatabaseClient databaseClient = TransactionalDatabaseClient.create(connectionFactory); + +Flux completion = databaseClient.inTransaction(db -> { + + return db.execute().sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)") + .bind("id", "joe") + .bind("name", "Joe") + .bind("age", 34) + .fetch().rowsUpdated() + .then(db.execute().sql("INSERT INTO contacts (id, name) VALUES(:id, :name)") + .bind("id", "joe") + .bind("name", "Joe") + .fetch().rowsUpdated()) + .then(); +}); +---- diff --git a/src/main/asciidoc/reference/r2dbc.adoc b/src/main/asciidoc/reference/r2dbc.adoc index 27802ce5..27e5f3e1 100644 --- a/src/main/asciidoc/reference/r2dbc.adoc +++ b/src/main/asciidoc/reference/r2dbc.adoc @@ -1,536 +1,12 @@ [[r2dbc.core]] = R2DBC support -The R2DBC support contains a wide range of features: +include::r2dbc-core.adoc[] -* Spring configuration support with Java-based `@Configuration` classes for an R2DBC driver instance. -* `DatabaseClient` helper class that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs. -* Exception translation into Spring's portable Data Access Exception hierarchy. -* Feature-rich Object Mapping integrated with Spring's Conversion Service. -* Annotation-based mapping metadata that is extensible to support other metadata formats. -* Automatic implementation of Repository interfaces, including support for custom query methods. +include::r2dbc-databaseclient.adoc[leveloffset=+1] -For most tasks, you should use `DatabaseClient` or the Repository support, which both leverage the rich mapping functionality. -`DatabaseClient` is the place to look for accessing functionality such as ad-hoc CRUD operations. 
+include::r2dbc-sql.adoc[leveloffset=+1] -[[r2dbc.getting-started]] -== Getting Started +include::r2dbc-fluent.adoc[leveloffset=+1] -An easy way to bootstrap setting up a working environment is to create a Spring-based project through https://start.spring.io[start.spring.io]. - -. Add the following to the pom.xml files `dependencies` element: -+ -[source,xml,subs="+attributes"] ----- - - - - - - org.springframework.data - spring-data-r2dbc - {version} - - - - - io.r2dbc - r2dbc-h2 - {r2dbcVersion} - - - ----- -. Change the version of Spring in the pom.xml to be -+ -[source,xml,subs="+attributes"] ----- -{springVersion} ----- -. Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level of your `` element: -+ -[source,xml] ----- - - - spring-milestone - Spring Maven MILESTONE Repository - https://repo.spring.io/libs-milestone - - ----- - -The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here]. - -You may also want to set the logging level to `DEBUG` to see some additional information. 
To do so, edit the `application.properties` file to have the following content: - -[source] ----- -logging.level.org.springframework.data.r2dbc=DEBUG ----- - -Then you can create a `Person` class to persist: - -[source,java] ----- -package org.spring.r2dbc.example; - -public class Person { - - private String id; - private String name; - private int age; - - public Person(String id, String name, int age) { - this.id = id; - this.name = name; - this.age = age; - } - - public String getId() { - return id; - } - public String getName() { - return name; - } - public int getAge() { - return age; - } - - @Override - public String toString() { - return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; - } -} ----- - -Next, you need to create a table structure in your database: - -[source,sql] ----- -CREATE TABLE person - (id VARCHAR(255) PRIMARY KEY, - name VARCHAR(255), - age INT); ----- - -You also need a main application to run: - -[source,java] ----- -package org.spring.r2dbc.example; - -public class R2dbcApp { - - private static final Log log = LogFactory.getLog(R2dbcApp.class); - - public static void main(String[] args) throws Exception { - - ConnectionFactory connectionFactory = new H2ConnectionFactory(H2ConnectionConfiguration.builder() - .url("mem:test;DB_CLOSE_DELAY=10") - .build()); - - DatabaseClient client = DatabaseClient.create(connectionFactory); - - client.execute() - .sql("CREATE TABLE person" + - "(id VARCHAR(255) PRIMARY KEY," + - "name VARCHAR(255)," + - "age INT)") - .fetch() - .rowsUpdated() - .as(StepVerifier::create) - .expectNextCount(1) - .verifyComplete(); - - client.insert() - .into(Person.class) - .using(new Person("joe", "Joe", 34)) - .then() - .as(StepVerifier::create) - .verifyComplete(); - - client.select() - .from(Person.class) - .fetch() - .first() - .doOnNext(it -> log.info(it)) - .as(StepVerifier::create) - .expectNextCount(1) - .verifyComplete(); - } -} ----- - -When you run the main program, the preceding examples produce 
output similar to the following: - -[source] ----- -2018-11-28 10:47:03,893 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person - (id VARCHAR(255) PRIMARY KEY, - name VARCHAR(255), - age INT)] -2018-11-28 10:47:04,074 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)] -2018-11-28 10:47:04,092 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person] -2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34] ----- - -Even in this simple example, there are few things to notice: - -* You can create an instance of the central helper class in Spring Data R2DBC, <>, by using a standard `io.r2dbc.spi.ConnectionFactory` object. -* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.). -* Mapping conventions can use field access. Notice that the `Person` class has only getters. -* If the constructor argument names match the column names of the stored row, they are used to instantiate the object. - -[[r2dbc.examples-repo]] -== Examples Repository - -There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works. - -[[r2dbc.drivers]] -== Connecting to a Relational Database with Spring - -One of the first tasks when using relational databases and Spring is to create a `io.r2dbc.spi.ConnectionFactory` object using the IoC container. The following example explains Java-based configuration. 
- -[[r2dbc.connectionfactory]] -=== Registering a `ConnectionFactory` Instance using Java-based Metadata - -The following example shows an example of using Java-based bean metadata to register an instance of a `io.r2dbc.spi.ConnectionFactory`: - -.Registering a `io.r2dbc.spi.ConnectionFactory` object using Java-based bean metadata -==== -[source,java] ----- -@Configuration -public class ApplicationConfiguration extends AbstractR2dbcConfiguration { - - @Override - @Bean - public ConnectionFactory connectionFactory() { - return …; - } -} ----- -==== - -This approach lets you use the standard `io.r2dbc.spi.ConnectionFactory` instance, with the container using Spring's `AbstractR2dbcConfiguration`. As compared to registering a `ConnectionFactory` instance directly, the configuration support has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates R2DBC exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and the use of `@Repository` is described in https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html[Spring's DAO support features]. - -`AbstractR2dbcConfiguration` registers also `DatabaseClient` that is required for database interaction and for Repository implementation. - -[[r2dbc.drivers]] -=== R2DBC Drivers - -Spring Data R2DBC supports drivers by R2DBC's pluggable SPI mechanism. Any driver implementing the R2DBC spec can be used with Spring Data R2DBC. -R2DBC is a relatively young initiative that gains significance by maturing through adoption. 
-As of writing the following 3 drivers are available: - -* https://github.com/r2dbc/r2dbc-postgresql[Postgres] (`io.r2dbc:r2dbc-postgresql`) -* https://github.com/r2dbc/r2dbc-h2[H2] (`io.r2dbc:r2dbc-h2`) -* https://github.com/r2dbc/r2dbc-mssql[Microsoft SQL Server] (`io.r2dbc:r2dbc-mssql`) - -Spring Data R2DBC reacts to database specifics by inspecting `ConnectionFactoryMetadata` and selects the appropriate database dialect. -You can configure an own `Dialect` if the used driver is not yet known to Spring Data R2DBC. - -[[r2dbc.datbaseclient]] -== Introduction to `DatabaseClient` - -Spring Data R2DBC includes a reactive, non-blocking `DatabaseClient` for database interaction. The client has a functional, fluent API with reactive types for declarative composition. -`DatabaseClient` encapsulates resource handling such as opening and closing connections so your application code can make use of executing SQL queries or calling higher-level functionality such as inserting or selecting data. - -NOTE: `DatabaseClient` is a young application component providing a minimal set of convenience methods that is likely to be extended through time. - -NOTE: Once configured, `DatabaseClient` is thread-safe and can be reused across multiple instances. - -Another central feature of `DatabaseClient` is translation of exceptions thrown by R2DBC drivers into Spring's portable Data Access Exception hierarchy. See "`<>`" for more information. - -The next section contains an example of how to work with the `DatabaseClient` in the context of the Spring container. - -[[r2dbc.datbaseclient.create]] -=== Creating `DatabaseClient` - -The simplest way to create a `DatabaseClient` is through a static factory method: - -[source,java] ----- -DatabaseClient.create(ConnectionFactory connectionFactory) ----- - -The above method creates a `DatabaseClient` with default settings. 
- -You can also use `DatabaseClient.builder()` with further options to customize the client: - -* `exceptionTranslator`: Supply a specific `R2dbcExceptionTranslator` to customize how R2DBC exceptions are translated into Spring's portable Data Access Exception hierarchy. See "`<>`" for more information. -* `dataAccessStrategy`: Strategy how SQL queries are generated and how objects are mapped. - -Once built, a `DatabaseClient` instance is immutable. However, you can clone it and build a modified copy without affecting the original instance, as the following example shows: - -[source,java] ----- -DatabaseClient client1 = DatabaseClient.builder() - .exceptionTranslator(exceptionTranslatorA).build(); - -DatabaseClient client2 = client1.mutate() - .exceptionTranslator(exceptionTranslatorB).build(); ----- - -=== Controlling Database Connections - -Spring Data R2DBC obtains a connection to the database through a `ConnectionFactory`. -A `ConnectionFactory` is part of the R2DBC specification and is a generalized connection factory. -It lets a container or a framework hide connection pooling and transaction management issues from the application code. - -When you use Spring Data R2DBC, you can create a `ConnectionFactory` using your R2DBC driver. -`ConnectionFactory` implementations can either return the same connection, different connections or provide connection pooling. -`DatabaseClient` uses `ConnectionFactory` to create and release connections per operation without affinity to a particular connection across multiple operations. - -[[r2dbc.exception]] -== Exception Translation - -The Spring framework provides exception translation for a wide variety of database and mapping technologies. -This has traditionally been for JDBC and JPA. The Spring support for R2DBC extends this feature by providing implementations of the `R2dbcExceptionTranslator` interface. 
- -`R2dbcExceptionTranslator` is an interface to be implemented by classes that can translate between `R2dbcException` and Spring’s own `org.springframework.dao.DataAccessException`, which is agnostic in regard to data access strategy. -Implementations can be generic (for example, using SQLState codes) or proprietary (for example, using Postgres error codes) for greater precision. - -`SqlErrorCodeR2dbcExceptionTranslator` is the implementation of `R2dbcExceptionTranslator` that is used by default. -This implementation uses specific vendor codes. -It is more precise than the SQLState implementation. -The error code translations are based on codes held in a JavaBean type class called `SQLErrorCodes`. -This class is created and populated by an `SQLErrorCodesFactory`, which (as the name suggests) is a factory for creating SQLErrorCodes based on the contents of a configuration file named `sql-error-codes.xml` from Spring's Data Access module. -This file is populated with vendor codes and based on the `ConnectionFactoryName` taken from `ConnectionFactoryMetadata`. -The codes for the actual database you are using are used. - -The `SqlErrorCodeR2dbcExceptionTranslator` applies matching rules in the following sequence: - -1. Any custom translation implemented by a subclass. Normally, the provided concrete `SqlErrorCodeR2dbcExceptionTranslator` is used, so this rule does not apply. It applies only if you have actually provided a subclass implementation. -2. Any custom implementation of the `SQLExceptionTranslator` interface that is provided as the `customSqlExceptionTranslator` property of the `SQLErrorCodes` class. -3. Error code matching is applied. -4. Use a fallback translator. - - -NOTE: The `SQLErrorCodesFactory` is used by default to define Error codes and custom exception translations. 
They are looked up in a file named `sql-error-codes.xml` from the classpath, and the matching `SQLErrorCodes` instance is located based on the database name from the database metadata of the database in use. `SQLErrorCodesFactory` is as of now part of Spring JDBC. Spring Data R2DBC reuses existing translation configurations. - -You can extend `SqlErrorCodeR2dbcExceptionTranslator`, as the following example shows: - -[source,java] ----- -public class CustomSqlErrorCodeR2dbcExceptionTranslator extends SqlErrorCodeR2dbcExceptionTranslator { - - protected DataAccessException customTranslate(String task, String sql, R2dbcException r2dbcex) { - if (sqlex.getErrorCode() == -12345) { - return new DeadlockLoserDataAccessException(task, r2dbcex); - } - return null; - } -} ----- - -In the preceding example, the specific error code (`-12345`) is translated, while other errors are left to be translated by the default translator implementation. -To use this custom translator, you must configure `DatabaseClient` through the builder method `exceptionTranslator`, and you must use this `DatabaseClient` for all of the data access processing where this translator is needed. -The following example shows how you can use this custom translator: - -[source,java] ----- -ConnectionFactory connectionFactory = …; - -CustomSqlErrorCodeR2dbcExceptionTranslator exceptionTranslator = new CustomSqlErrorCodeR2dbcExceptionTranslator(); - -DatabaseClient client = DatabaseClient.builder() - .connectionFactory(connectionFactory) - .exceptionTranslator(exceptionTranslator) - .build(); ----- - -[[r2dbc.datbaseclient.statements]] -=== Running Statements - -Running a statement is the basic functionality that is covered by `DatabaseClient`. 
-The following example shows what you need to include for a minimal but fully functional class that creates a new table: - -[source,java] ----- -Mono completion = client.execute() - .sql("CREATE TABLE person (id VARCHAR(255) PRIMARY KEY, name VARCHAR(255), age INTEGER);") - .then(); ----- - -`DatabaseClient` is designed for a convenient fluent usage. -It exposes intermediate, continuation, and terminal methods at each stage of the execution specification. -The example above uses `then()` to return a completion `Publisher` that completes as soon as the query (or queries, if the SQL query contains multiple statements) completes. - -NOTE: `execute().sql(…)` accepts either the SQL query string or a query `Supplier` to defer the actual query creation until execution. - -[[r2dbc.datbaseclient.queries]] -=== Running Queries - -SQL queries can return values or the number of affected rows. -`DatabaseClient` can return the number of updated rows or the rows themselves, depending on the issued query. - -The following example shows an `UPDATE` statement that returns the number of updated rows: - -[source,java] ----- -Mono affectedRows = client.execute() - .sql("UPDATE person SET name = 'Joe'") - .fetch().rowsUpdated(); ----- - -Running a `SELECT` query returns a different type of result, in particular tabular results. Tabular data is typically consumes by streaming each `Row`. -You might have noticed the use of `fetch()` in the previous example. -`fetch()` is a continuation operator that allows you to specify how much data you want to consume. - -[source,java] ----- -Mono> first = client.execute() - .sql("SELECT id, name FROM person") - .fetch().first(); ----- - -Calling `first()` returns the first row from the result and discards remaining rows. -You can consume data with the following operators: - -* `first()` return the first row of the entire result -* `one()` returns exactly one result and fails if the result contains more rows. 
-* `all()` returns all rows of the result -* `rowsUpdated()` returns the number of affected rows (`INSERT` count, `UPDATE` count) - -`DatabaseClient` queries return their results by default as `Map` of column name to value. You can customize type mapping by applying an `as(Class)` operator. - -[source,java] ----- -Flux all = client.execute() - .sql("SELECT id, name FROM mytable") - .as(Person.class) - .fetch().all(); ----- - -`as(…)` applies <> and maps the resulting columns to your POJO. - -[[r2dbc.datbaseclient.mapping]] -=== Mapping Results - -You can customize result extraction beyond `Map` and POJO result extraction by providing an extractor `BiFunction`. -The extractor function interacts directly with R2DBC's `Row` and `RowMetadata` objects and can return arbitrary values (singular values, collections/maps, objects). - -The following example extracts the `id` column and emits its value: - -[source,java] ----- -Flux names= client.execute() - .sql("SELECT name FROM person") - .map((row, rowMetadata) -> row.get("id", String.class)) - .all(); ----- - -[[r2dbc.datbaseclient.mapping.null]] -.What about `null`? -**** -Relational database results may contain `null` values. -Reactive Streams forbids emission of `null` values which requires a proper `null` handling in the extractor function. -While you can obtain `null` values from a `Row`, you must not emit a `null` value. -You must wrap any `null` values in an object (e.g. `Optional` for singular values) to make sure a `null` value is never returned directly by your extractor function. -**** - -[[r2dbc.datbaseclient.binding]] -=== Binding Values to Queries - -A typical application requires parameterized SQL statements to select or update rows according to some input. -These are typically `SELECT` statements constrained by a `WHERE` clause or `INSERT`/`UPDATE` statements accepting input parameters. -Parameterized statements bear the risk of SQL injection if parameters are not escaped properly. 
-`DatabaseClient` leverages R2DBC's Bind API to eliminate the risk of SQL injection for query parameters. -You can provide a parameterized SQL statement with the `sql(…)` operator and bind parameters to the actual `Statement`. -Your R2DBC driver then executes the statement using prepared statements and parameter substitution. - -Parameter binding supports various binding strategies: - -* By Index using zero-based parameter indexes. -* By Name using the placeholder name. - -The following example shows parameter binding for a query: - -[source,java] ----- -db.execute() - .sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)") - .bind("id", "joe") - .bind("name", "Joe") - .bind("age", 34); ----- - -.R2DBC Native Bind Markers -**** -R2DBC uses database-native bind markers that depend on the actual database vendor. -As an example, Postgres uses indexed markers such as `$1`, `$2`, `$n`. -Another example is SQL Server that uses named bind markers prefixed with `@` (at). - -This is different from JDBC which requires `?` (question mark) as bind markers. -In JDBC, the actual drivers translate question mark bind markers to database-native markers as part of their statement execution. - -Spring Data R2DBC allows you to use native bind markers or named bind markers with the `:name` syntax. - -Named parameter support leverages ``Dialect``s to expand named parameters to native bind markers at the time of query execution which gives you a certain degree of query portability across various database vendors. -**** - -The query-preprocessor unrolls named `Collection` parameters into a series of bind markers to remove the need of dynamic query creation based on the number of arguments. -Nested object arrays are expanded to allow usage of e.g. select lists. 
- -Consider the following query: - -[source,sql] ----- -SELECT id, name, state FROM table WHERE (name, age) IN (('John', 35), ('Ann', 50)) ----- - -This query can be parametrized and executed as: - -[source,java] ----- -List tuples = new ArrayList<>(); -tuples.add(new Object[] {"John", 35}); -tuples.add(new Object[] {"Ann", 50}); - -db.execute() - .sql("SELECT id, name, state FROM table WHERE (name, age) IN (:tuples)") - .bind("tuples", tuples); ----- - -NOTE: Usage of select lists is vendor-dependent. - -A simpler variant using `IN` predicates: - -[source,java] ----- -db.execute() - .sql("SELECT id, name, state FROM table WHERE age IN (:ages)") - .bind("ages", Arrays.asList(35, 50)); ----- - -[[r2dbc.datbaseclient.transactions]] -=== Transactions - -A common pattern when using relational databases is grouping multiple queries within a unit of work that is guarded by a transaction. -Relational databases typically associate a transaction with a single transport connection. -Using different connections hence results in utilizing different transactions. -Spring Data R2DBC includes a transactional `DatabaseClient` implementation with `TransactionalDatabaseClient` that allows you to group multiple statements within the same transaction. -`TransactionalDatabaseClient` is a extension of `DatabaseClient` that exposes the same functionality as `DatabaseClient` and adds transaction-management methods. 
- -You can run multiple statements within a transaction using the `inTransaction(Function)` closure: - -[source,java] ----- -TransactionalDatabaseClient databaseClient = TransactionalDatabaseClient.create(connectionFactory); - -Flux completion = databaseClient.inTransaction(db -> { - - return db.execute().sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)") - .bind("id", "joe") - .bind("name", "Joe") - .bind("age", 34) - .fetch().rowsUpdated() - .then(db.execute().sql("INSERT INTO contacts (id, name) VALUES(:id, :name)") - .bind("id", "joe") - .bind("name", "Joe") - .fetch().rowsUpdated()) - .then(); -}); ----- +include::r2dbc-transactions.adoc[leveloffset=+1] diff --git a/src/main/java/org/springframework/data/r2dbc/dialect/Bindings.java b/src/main/java/org/springframework/data/r2dbc/dialect/Bindings.java new file mode 100644 index 00000000..94fb90e0 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/dialect/Bindings.java @@ -0,0 +1,285 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.dialect; + +import io.r2dbc.spi.Statement; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Spliterator; +import java.util.function.Consumer; + +import org.springframework.data.r2dbc.domain.BindTarget; +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Value object representing value and {@code null} bindings for a {@link Statement} using {@link BindMarkers}. Bindings + * are typically immutable. + * + * @author Mark Paluch + */ +public class Bindings implements Streamable { + + private static final Bindings EMPTY = new Bindings(); + + private final Map bindings; + + /** + * Create empty {@link Bindings}. + */ + public Bindings() { + this.bindings = Collections.emptyMap(); + } + + /** + * Create {@link Bindings} from a {@link Map}. + * + * @param bindings must not be {@literal null}. + */ + public Bindings(Collection bindings) { + + Assert.notNull(bindings, "Bindings must not be null"); + + Map mapping = new LinkedHashMap<>(bindings.size()); + bindings.forEach(it -> mapping.put(it.getBindMarker(), it)); + this.bindings = mapping; + } + + Bindings(Map bindings) { + this.bindings = bindings; + } + + /** + * Create a new, empty {@link Bindings} object. + * + * @return a new, empty {@link Bindings} object. + */ + public static Bindings empty() { + return EMPTY; + } + + protected Map getBindings() { + return this.bindings; + } + + /** + * Merge this bindings with an other {@link Bindings} object and create a new merged {@link Bindings} object. + * + * @param left the left object to merge with. + * @param right the right object to merge with. + * @return a new, merged {@link Bindings} object. 
+ */ + public static Bindings merge(Bindings left, Bindings right) { + + Assert.notNull(left, "Left side Bindings must not be null"); + Assert.notNull(right, "Right side Bindings must not be null"); + + List result = new ArrayList<>(left.getBindings().size() + right.getBindings().size()); + + result.addAll(left.getBindings().values()); + result.addAll(right.getBindings().values()); + + return new Bindings(result); + } + + /** + * Merge this bindings with an other {@link Bindings} object and create a new merged {@link Bindings} object. + * + * @param other the object to merge with. + * @return a new, merged {@link Bindings} object. + */ + public Bindings and(Bindings other) { + return merge(this, other); + } + + /** + * Apply the bindings to a {@link BindTarget}. + * + * @param bindTarget the target to apply bindings to. + */ + public void apply(BindTarget bindTarget) { + + Assert.notNull(bindTarget, "BindTarget must not be null"); + this.bindings.forEach((marker, binding) -> binding.apply(bindTarget)); + } + + /** + * Performs the given action for each binding of this {@link Bindings} until all bindings have been processed or the + * action throws an exception. Actions are performed in the order of iteration (if an iteration order is specified). + * Exceptions thrown by the action are relayed to the + * + * @param action The action to be performed for each {@link Binding}. + */ + public void forEach(Consumer action) { + this.bindings.forEach((marker, binding) -> action.accept(binding)); + } + + /* + * (non-Javadoc) + * @see java.lang.Iterable#iterator() + */ + @Override + public Iterator iterator() { + return this.bindings.values().iterator(); + } + + /* + * (non-Javadoc) + * @see java.lang.Iterable#spliterator() + */ + @Override + public Spliterator spliterator() { + return this.bindings.values().spliterator(); + } + + /** + * Base class for value objects representing a value or a {@code NULL} binding. 
+ */ + public abstract static class Binding { + + private final BindMarker marker; + + protected Binding(BindMarker marker) { + this.marker = marker; + } + + /** + * @return the associated {@link BindMarker}. + */ + public BindMarker getBindMarker() { + return this.marker; + } + + /** + * Return {@literal true} if there is a value present, otherwise {@literal false} for a {@code NULL} binding. + * + * @return {@literal true} if there is a value present, otherwise {@literal false} for a {@code NULL} binding. + */ + public abstract boolean hasValue(); + + /** + * Return {@literal true} if this is a {@code NULL} binding. + * + * @return {@literal true} if this is a {@code NULL} binding. + */ + public boolean isNull() { + return !hasValue(); + } + + /** + * Returns the value of this binding. Can be {@literal null} if this is a {@code NULL} binding. + * + * @return value of this binding. Can be {@literal null} if this is a {@code NULL} binding. + */ + @Nullable + public abstract Object getValue(); + + /** + * Applies the binding to a {@link BindTarget}. + * + * @param bindTarget the target to apply bindings to. + */ + public abstract void apply(BindTarget bindTarget); + } + + /** + * Value binding. 
+ */ + public static class ValueBinding extends Binding { + + private final Object value; + + public ValueBinding(BindMarker marker, Object value) { + super(marker); + this.value = value; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#hasValue() + */ + public boolean hasValue() { + return true; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#getValue() + */ + public Object getValue() { + return this.value; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#apply(io.r2dbc.spi.Statement) + */ + @Override + public void apply(BindTarget bindTarget) { + getBindMarker().bind(bindTarget, getValue()); + } + } + + /** + * {@code NULL} binding. + */ + public static class NullBinding extends Binding { + + private final Class valueType; + + public NullBinding(BindMarker marker, Class valueType) { + super(marker); + this.valueType = valueType; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#hasValue() + */ + public boolean hasValue() { + return false; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#getValue() + */ + @Nullable + public Object getValue() { + return null; + } + + public Class getValueType() { + return this.valueType; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#apply(BindTarget) + */ + @Override + public void apply(BindTarget bindTarget) { + getBindMarker().bindNull(bindTarget, getValueType()); + } + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/dialect/MutableBindings.java b/src/main/java/org/springframework/data/r2dbc/dialect/MutableBindings.java new file mode 100644 index 00000000..dd2abda5 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/dialect/MutableBindings.java @@ -0,0 +1,132 @@ +/* + * Copyright 2019 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.dialect; + +import io.r2dbc.spi.Statement; + +import java.util.LinkedHashMap; + +import org.springframework.util.Assert; + +/** + * Mutable extension to {@link Bindings} for Value and {@code null} bindings for a {@link Statement} using + * {@link BindMarkers}. + * + * @author Mark Paluch + */ +public class MutableBindings extends Bindings { + + private final BindMarkers markers; + + /** + * Create new {@link MutableBindings}. + * + * @param markers must not be {@literal null}. + */ + public MutableBindings(BindMarkers markers) { + + super(new LinkedHashMap<>()); + + Assert.notNull(markers, "BindMarkers must not be null"); + + this.markers = markers; + } + + /** + * Obtain the next {@link BindMarker}. Increments {@link BindMarkers} state. + * + * @return the next {@link BindMarker}. + */ + public BindMarker nextMarker() { + return markers.next(); + } + + /** + * Obtain the next {@link BindMarker} with a name {@code hint}. Increments {@link BindMarkers} state. + * + * @param hint name hint. + * @return the next {@link BindMarker}. + */ + public BindMarker nextMarker(String hint) { + return markers.next(hint); + } + + /** + * Bind a value to {@link BindMarker}. + * + * @param marker must not be {@literal null}. + * @param value must not be {@literal null}. + * @return {@code this} {@link MutableBindings}. 
+ */ + public MutableBindings bind(BindMarker marker, Object value) { + + Assert.notNull(marker, "BindMarker must not be null"); + Assert.notNull(value, "Value must not be null"); + + getBindings().put(marker, new ValueBinding(marker, value)); + + return this; + } + + /** + * Bind a value and return the related {@link BindMarker}. Increments {@link BindMarkers} state. + * + * @param value must not be {@literal null}. + * @return the related {@link BindMarker}. + */ + public BindMarker bind(Object value) { + + Assert.notNull(value, "Value must not be null"); + + BindMarker marker = nextMarker(); + getBindings().put(marker, new ValueBinding(marker, value)); + + return marker; + } + + /** + * Bind a {@code NULL} value to {@link BindMarker}. + * + * @param marker must not be {@literal null}. + * @param valueType must not be {@literal null}. + * @return {@code this} {@link MutableBindings}. + */ + public MutableBindings bindNull(BindMarker marker, Class valueType) { + + Assert.notNull(marker, "BindMarker must not be null"); + Assert.notNull(valueType, "Value type must not be null"); + + getBindings().put(marker, new NullBinding(marker, valueType)); + + return this; + } + + /** + * Bind a {@code NULL} value and return the related {@link BindMarker}. Increments {@link BindMarkers} state. + * + * @param valueType must not be {@literal null}. + * @return the related {@link BindMarker}. 
+ */ + public BindMarker bindNull(Class valueType) { + + Assert.notNull(valueType, "Value type must not be null"); + + BindMarker marker = nextMarker(); + getBindings().put(marker, new NullBinding(marker, valueType)); + + return marker; + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/BindableOperation.java b/src/main/java/org/springframework/data/r2dbc/domain/BindableOperation.java similarity index 77% rename from src/main/java/org/springframework/data/r2dbc/function/BindableOperation.java rename to src/main/java/org/springframework/data/r2dbc/domain/BindableOperation.java index d37c44f5..22f1baa3 100644 --- a/src/main/java/org/springframework/data/r2dbc/function/BindableOperation.java +++ b/src/main/java/org/springframework/data/r2dbc/domain/BindableOperation.java @@ -1,4 +1,19 @@ -package org.springframework.data.r2dbc.function; +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.domain; import io.r2dbc.spi.Statement; diff --git a/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java b/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java index da2cf453..39f7c8bf 100644 --- a/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java +++ b/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java @@ -30,6 +30,9 @@ import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.r2dbc.domain.PreparedOperation; +import org.springframework.data.r2dbc.domain.SettableValue; +import org.springframework.data.r2dbc.function.query.Criteria; +import org.springframework.data.r2dbc.function.query.Update; import org.springframework.data.r2dbc.support.R2dbcExceptionTranslator; /** @@ -58,6 +61,16 @@ public interface DatabaseClient { */ InsertIntoSpec insert(); + /** + * Prepare an SQL UPDATE call. + */ + UpdateTableSpec update(); + + /** + * Prepare an SQL DELETE call. + */ + DeleteFromSpec delete(); + /** * Return a builder to mutate properties of this database client. */ @@ -262,7 +275,7 @@ interface SelectFromSpec { } /** - * Contract for specifying {@code SELECT} options leading to the exchange. + * Contract for specifying {@code INSERT} options leading to the exchange. */ interface InsertIntoSpec { @@ -284,6 +297,52 @@ interface InsertIntoSpec { TypedInsertSpec into(Class table); } + /** + * Contract for specifying {@code UPDATE} options leading to the exchange. + */ + interface UpdateTableSpec { + + /** + * Specify the target {@literal table} to update. + * + * @param table must not be {@literal null} or empty. + * @return a {@link GenericUpdateSpec} for further configuration of the update. Guaranteed to be not + * {@literal null}. + */ + GenericUpdateSpec table(String table); + + /** + * Specify the target table to update to using the {@link Class entity class}. 
+ * + * @param table must not be {@literal null}. + * @return a {@link TypedUpdateSpec} for further configuration of the update. Guaranteed to be not {@literal null}. + */ + TypedUpdateSpec table(Class table); + } + + /** + * Contract for specifying {@code DELETE} options leading to the exchange. + */ + interface DeleteFromSpec { + + /** + * Specify the source {@literal table} to delete from. + * + * @param table must not be {@literal null} or empty. + * @return a {@link DeleteMatchingSpec} for further configuration of the delete. Guaranteed to be not + * {@literal null}. + */ + DeleteMatchingSpec from(String table); + + /** + * Specify the source table to delete from to using the {@link Class entity class}. + * + * @param table must not be {@literal null}. + * @return a {@link TypedDeleteSpec} for further configuration of the delete. Guaranteed to be not {@literal null}. + */ + TypedDeleteSpec from(Class table); + } + /** * Contract for specifying {@code SELECT} options leading to the exchange. */ @@ -354,6 +413,13 @@ interface SelectSpec> { */ S project(String... selectedFields); + /** + * Configure a filter {@link Criteria}. + * + * @param criteria must not be {@literal null}. + */ + S matching(Criteria criteria); + /** * Configure {@link Sort}. * @@ -361,12 +427,21 @@ interface SelectSpec> { */ S orderBy(Sort sort); + /** + * Configure {@link Sort}. + * + * @param orders must not be {@literal null}. + */ + default S orderBy(Sort.Order... orders) { + return orderBy(Sort.by(orders)); + } + /** * Configure pagination. Overrides {@link Sort} if the {@link Pageable} contains a {@link Sort} object. * - * @param page must not be {@literal null}. + * @param pageable must not be {@literal null}. */ - S page(Pageable page); + S page(Pageable pageable); } /** @@ -389,12 +464,23 @@ interface GenericInsertSpec extends InsertSpec { * * @param field must not be {@literal null} or empty. * @param type must not be {@literal null}. + * @deprecated will be removed soon. 
Use {@link #nullValue(String)}. */ - GenericInsertSpec nullValue(String field, Class type); + @Deprecated + default GenericInsertSpec nullValue(String field, Class type) { + return value(field, SettableValue.empty(type)); + } + + /** + * Specify a {@literal null} value to insert. + * + * @param field must not be {@literal null} or empty. + */ + GenericInsertSpec nullValue(String field); } /** - * Contract for specifying {@code SELECT} options leading the exchange. + * Contract for specifying {@code INSERT} options leading the exchange. */ interface TypedInsertSpec { @@ -437,7 +523,7 @@ interface InsertSpec { /** * Configure a result mapping {@link java.util.function.BiFunction function}. * - * @param mappwingFunction must not be {@literal null}. + * @param mappingFunction must not be {@literal null}. * @param result type. * @return a {@link FetchSpec} for configuration what to fetch. Guaranteed to be not {@literal null}. */ @@ -456,6 +542,125 @@ interface InsertSpec { Mono then(); } + /** + * Contract for specifying {@code UPDATE} options leading to the exchange. + */ + interface GenericUpdateSpec { + + /** + * Specify an {@link Update} object containing assignments. + * + * @param update must not be {@literal null}. + */ + UpdateMatchingSpec using(Update update); + } + + /** + * Contract for specifying {@code UPDATE} options leading to the exchange. + */ + interface TypedUpdateSpec { + + /** + * Update the given {@code objectToUpdate}. + * + * @param objectToUpdate the object of which the attributes will provide the values for the update and the primary + * key. Must not be {@literal null}. + * @return a {@link UpdateSpec} for further configuration of the update. Guaranteed to be not {@literal null}. + */ + UpdateSpec using(T objectToUpdate); + + /** + * Use the given {@code tableName} as update target. + * + * @param tableName must not be {@literal null} or empty. + * @return a {@link TypedUpdateSpec} for further configuration of the update. 
Guaranteed to be not {@literal null}. + */ + TypedUpdateSpec table(String tableName); + } + + /** + * Contract for specifying {@code UPDATE} options leading to the exchange. + */ + interface UpdateMatchingSpec extends UpdateSpec { + + /** + * Configure a filter {@link Criteria}. + * + * @param criteria must not be {@literal null}. + */ + UpdateSpec matching(Criteria criteria); + } + + /** + * Contract for specifying {@code UPDATE} options leading to the exchange. + */ + interface UpdateSpec { + + /** + * Perform the SQL call and retrieve the result. + */ + UpdatedRowsFetchSpec fetch(); + + /** + * Perform the SQL call and return a {@link Mono} that completes without result on statement completion. + * + * @return a {@link Mono} ignoring its payload (actively dropping). + */ + Mono then(); + } + + /** + * Contract for specifying {@code DELETE} options leading to the exchange. + */ + interface TypedDeleteSpec extends DeleteSpec { + + /** + * Use the given {@code tableName} as delete target. + * + * @param tableName must not be {@literal null} or empty. + * @return a {@link TypedDeleteSpec} for further configuration of the delete. Guaranteed to be not {@literal null}. + */ + TypedDeleteSpec table(String tableName); + + /** + * Configure a filter {@link Criteria}. + * + * @param criteria must not be {@literal null}. + */ + DeleteSpec matching(Criteria criteria); + } + + /** + * Contract for specifying {@code DELETE} options leading to the exchange. + */ + interface DeleteMatchingSpec extends DeleteSpec { + + /** + * Configure a filter {@link Criteria}. + * + * @param criteria must not be {@literal null}. + */ + DeleteSpec matching(Criteria criteria); + } + + /** + * Contract for specifying {@code DELETE} options leading to the exchange. + */ + interface DeleteSpec { + + /** + * Perform the SQL call and retrieve the result. 
+ */ + UpdatedRowsFetchSpec fetch(); + + /** + * Perform the SQL call and return a {@link Mono} that completes without result on statement completion. + * + * @return a {@link Mono} ignoring its payload (actively dropping). + */ + Mono then(); + } + /** * Contract for specifying parameter bindings. */ diff --git a/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java b/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java index ebf2107f..4db374c0 100644 --- a/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java +++ b/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java @@ -34,7 +34,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -53,13 +52,15 @@ import org.springframework.data.domain.Sort; import org.springframework.data.r2dbc.UncategorizedR2dbcException; import org.springframework.data.r2dbc.domain.BindTarget; +import org.springframework.data.r2dbc.domain.BindableOperation; import org.springframework.data.r2dbc.domain.OutboundRow; import org.springframework.data.r2dbc.domain.PreparedOperation; import org.springframework.data.r2dbc.domain.SettableValue; import org.springframework.data.r2dbc.function.connectionfactory.ConnectionProxy; import org.springframework.data.r2dbc.function.convert.ColumnMapRowMapper; +import org.springframework.data.r2dbc.function.query.Criteria; +import org.springframework.data.r2dbc.function.query.Update; import org.springframework.data.r2dbc.support.R2dbcExceptionTranslator; -import org.springframework.data.relational.core.sql.Insert; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -96,7 +97,7 @@ class DefaultDatabaseClient implements DatabaseClient, ConnectionAccessor { 
@Override public Builder mutate() { - return builder; + return this.builder; } @Override @@ -114,6 +115,16 @@ public InsertIntoSpec insert() { return new DefaultInsertIntoSpec(); } + @Override + public UpdateTableSpec update() { + return new DefaultUpdateTableSpec(); + } + + @Override + public DeleteFromSpec delete() { + return new DefaultDeleteFromSpec(); + } + /** * Execute a callback {@link Function} within a {@link Connection} scope. The function is responsible for creating a * {@link Mono}. The connection is released after the {@link Mono} terminates (or the subscription is cancelled). @@ -196,7 +207,7 @@ protected Publisher closeConnection(Connection connection) { * @throws IllegalStateException in case of no DataSource set */ protected ConnectionFactory obtainConnectionFactory() { - return connector; + return this.connector; } /** @@ -220,7 +231,7 @@ protected Connection createConnectionProxy(Connection con) { */ protected DataAccessException translateException(String task, @Nullable String sql, R2dbcException ex) { - DataAccessException dae = exceptionTranslator.translate(task, sql, ex); + DataAccessException dae = this.exceptionTranslator.translate(task, sql, ex); return (dae != null ? 
dae : new UncategorizedR2dbcException(task, sql, ex)); } @@ -345,9 +356,9 @@ FetchSpec exchange(Supplier sqlSupplier, BiFunction FetchSpec exchange(Supplier sqlSupplier, BiFunction { + this.byName.forEach((name, o) -> { if (o.getValue() != null) { operation.bind(bindTarget, name, o.getValue()); @@ -364,7 +375,7 @@ FetchSpec exchange(Supplier sqlSupplier, BiFunction FetchSpec map(BiFunction mappingFunction) { @Override public FetchSpec fetch() { - return exchange(this.sqlSupplier, mappingFunction); + return exchange(this.sqlSupplier, this.mappingFunction); } @Override @@ -596,7 +607,7 @@ public DefaultTypedExecuteSpec bind(Object bean) { @Override protected DefaultTypedExecuteSpec createInstance(Map byIndex, Map byName, Supplier sqlSupplier) { - return createTypedExecuteSpec(byIndex, byName, sqlSupplier, typeToRead); + return createTypedExecuteSpec(byIndex, byName, sqlSupplier, this.typeToRead); } } @@ -624,6 +635,7 @@ private abstract class DefaultSelectSpecSupport { final String table; final List projectedFields; + final @Nullable Criteria criteria; final Sort sort; final Pageable page; @@ -633,6 +645,7 @@ private abstract class DefaultSelectSpecSupport { this.table = table; this.projectedFields = Collections.emptyList(); + this.criteria = null; this.sort = Sort.unsorted(); this.page = Pageable.unpaged(); } @@ -644,34 +657,34 @@ public DefaultSelectSpecSupport project(String... 
selectedFields) { projectedFields.addAll(this.projectedFields); projectedFields.addAll(Arrays.asList(selectedFields)); - return createInstance(table, projectedFields, sort, page); + return createInstance(this.table, projectedFields, this.criteria, this.sort, this.page); + } + + public DefaultSelectSpecSupport where(Criteria whereCriteria) { + + Assert.notNull(whereCriteria, "Criteria must not be null!"); + + return createInstance(this.table, this.projectedFields, whereCriteria, this.sort, this.page); } public DefaultSelectSpecSupport orderBy(Sort sort) { Assert.notNull(sort, "Sort must not be null!"); - return createInstance(table, projectedFields, sort, page); + return createInstance(this.table, this.projectedFields, this.criteria, sort, this.page); } public DefaultSelectSpecSupport page(Pageable page) { Assert.notNull(page, "Pageable must not be null!"); - return createInstance(table, projectedFields, sort, page); + return createInstance(this.table, this.projectedFields, this.criteria, this.sort, page); } - FetchSpec execute(String sql, BiFunction mappingFunction) { - - Function selectFunction = it -> { - - if (logger.isDebugEnabled()) { - logger.debug("Executing SQL statement [" + sql + "]"); - } - - return it.createStatement(sql); - }; + FetchSpec execute(PreparedOperation preparedOperation, BiFunction mappingFunction) { + String sql = getRequiredSql(preparedOperation); + Function selectFunction = wrapPreparedOperation(sql, preparedOperation); Function> resultFunction = it -> Flux.from(selectFunction.apply(it).execute()); return new DefaultSqlResult<>(DefaultDatabaseClient.this, // @@ -681,14 +694,14 @@ FetchSpec execute(String sql, BiFunction mappingFunc mappingFunction); } - protected abstract DefaultSelectSpecSupport createInstance(String table, List projectedFields, Sort sort, - Pageable page); + protected abstract DefaultSelectSpecSupport createInstance(String table, List projectedFields, + Criteria criteria, Sort sort, Pageable page); } private class 
DefaultGenericSelectSpec extends DefaultSelectSpecSupport implements GenericSelectSpec { - DefaultGenericSelectSpec(String table, List projectedFields, Sort sort, Pageable page) { - super(table, projectedFields, sort, page); + DefaultGenericSelectSpec(String table, List projectedFields, Criteria criteria, Sort sort, Pageable page) { + super(table, projectedFields, criteria, sort, page); } DefaultGenericSelectSpec(String table) { @@ -700,8 +713,8 @@ public TypedSelectSpec as(Class resultType) { Assert.notNull(resultType, "Result type must not be null!"); - return new DefaultTypedSelectSpec<>(table, projectedFields, sort, page, resultType, - dataAccessStrategy.getRowMapper(resultType)); + return new DefaultTypedSelectSpec<>(this.table, this.projectedFields, this.criteria, this.sort, this.page, + resultType, dataAccessStrategy.getRowMapper(resultType)); } @Override @@ -717,14 +730,19 @@ public DefaultGenericSelectSpec project(String... selectedFields) { return (DefaultGenericSelectSpec) super.project(selectedFields); } + @Override + public DefaultGenericSelectSpec matching(Criteria criteria) { + return (DefaultGenericSelectSpec) super.where(criteria); + } + @Override public DefaultGenericSelectSpec orderBy(Sort sort) { return (DefaultGenericSelectSpec) super.orderBy(sort); } @Override - public DefaultGenericSelectSpec page(Pageable page) { - return (DefaultGenericSelectSpec) super.page(page); + public DefaultGenericSelectSpec page(Pageable pageable) { + return (DefaultGenericSelectSpec) super.page(pageable); } @Override @@ -734,15 +752,23 @@ public FetchSpec> fetch() { private FetchSpec exchange(BiFunction mappingFunction) { - String select = dataAccessStrategy.select(table, new LinkedHashSet<>(this.projectedFields), sort, page); + StatementMapper mapper = dataAccessStrategy.getStatementMapper(); + + StatementMapper.SelectSpec selectSpec = mapper.createSelect(this.table).withProjection(this.projectedFields) + .withSort(this.sort).withPage(this.page); + + if 
(this.criteria != null) { + selectSpec = selectSpec.withCriteria(this.criteria); + } - return execute(select, mappingFunction); + PreparedOperation operation = mapper.getMappedObject(selectSpec); + return execute(operation, mappingFunction); } @Override - protected DefaultGenericSelectSpec createInstance(String table, List projectedFields, Sort sort, - Pageable page) { - return new DefaultGenericSelectSpec(table, projectedFields, sort, page); + protected DefaultGenericSelectSpec createInstance(String table, List projectedFields, Criteria criteria, + Sort sort, Pageable page) { + return new DefaultGenericSelectSpec(table, projectedFields, criteria, sort, page); } } @@ -763,14 +789,14 @@ private class DefaultTypedSelectSpec extends DefaultSelectSpecSupport impleme this.mappingFunction = dataAccessStrategy.getRowMapper(typeToRead); } - DefaultTypedSelectSpec(String table, List projectedFields, Sort sort, Pageable page, + DefaultTypedSelectSpec(String table, List projectedFields, Criteria criteria, Sort sort, Pageable page, BiFunction mappingFunction) { - this(table, projectedFields, sort, page, null, mappingFunction); + this(table, projectedFields, criteria, sort, page, null, mappingFunction); } - DefaultTypedSelectSpec(String table, List projectedFields, Sort sort, Pageable page, Class typeToRead, - BiFunction mappingFunction) { - super(table, projectedFields, sort, page); + DefaultTypedSelectSpec(String table, List projectedFields, Criteria criteria, Sort sort, Pageable page, + Class typeToRead, BiFunction mappingFunction) { + super(table, projectedFields, criteria, sort, page); this.typeToRead = typeToRead; this.mappingFunction = mappingFunction; } @@ -796,40 +822,54 @@ public DefaultTypedSelectSpec project(String... 
selectedFields) { return (DefaultTypedSelectSpec) super.project(selectedFields); } + @Override + public DefaultTypedSelectSpec matching(Criteria criteria) { + return (DefaultTypedSelectSpec) super.where(criteria); + } + @Override public DefaultTypedSelectSpec orderBy(Sort sort) { return (DefaultTypedSelectSpec) super.orderBy(sort); } @Override - public DefaultTypedSelectSpec page(Pageable page) { - return (DefaultTypedSelectSpec) super.page(page); + public DefaultTypedSelectSpec page(Pageable pageable) { + return (DefaultTypedSelectSpec) super.page(pageable); } @Override public FetchSpec fetch() { - return exchange(mappingFunction); + return exchange(this.mappingFunction); } private FetchSpec exchange(BiFunction mappingFunction) { List columns; + StatementMapper mapper = dataAccessStrategy.getStatementMapper().forType(this.typeToRead); if (this.projectedFields.isEmpty()) { - columns = dataAccessStrategy.getAllColumns(typeToRead); + columns = dataAccessStrategy.getAllColumns(this.typeToRead); } else { columns = this.projectedFields; } - String select = dataAccessStrategy.select(table, new LinkedHashSet<>(columns), sort, page); + StatementMapper.SelectSpec selectSpec = mapper.createSelect(this.table).withProjection(columns) + .withPage(this.page).withSort(this.sort); + + if (this.criteria != null) { + selectSpec = selectSpec.withCriteria(this.criteria); + } + + PreparedOperation operation = mapper.getMappedObject(selectSpec); - return execute(select, mappingFunction); + return execute(operation, mappingFunction); } @Override - protected DefaultTypedSelectSpec createInstance(String table, List projectedFields, Sort sort, - Pageable page) { - return new DefaultTypedSelectSpec<>(table, projectedFields, sort, page, typeToRead, mappingFunction); + protected DefaultTypedSelectSpec createInstance(String table, List projectedFields, Criteria criteria, + Sort sort, Pageable page) { + return new DefaultTypedSelectSpec<>(table, projectedFields, criteria, sort, page, 
this.typeToRead, + this.mappingFunction); } } @@ -867,18 +907,22 @@ public GenericInsertSpec value(String field, Object value) { () -> String.format("Value for field %s must not be null. Use nullValue(…) instead.", field)); Map byName = new LinkedHashMap<>(this.byName); - byName.put(field, SettableValue.fromOrEmpty(value, value.getClass())); + if (value instanceof SettableValue) { + byName.put(field, (SettableValue) value); + } else { + byName.put(field, SettableValue.fromOrEmpty(value, value.getClass())); + } return new DefaultGenericInsertSpec<>(this.table, byName, this.mappingFunction); } @Override - public GenericInsertSpec nullValue(String field, Class type) { + public GenericInsertSpec nullValue(String field) { Assert.notNull(field, "Field must not be null!"); Map byName = new LinkedHashMap<>(this.byName); - byName.put(field, SettableValue.empty(type)); + byName.put(field, null); return new DefaultGenericInsertSpec<>(this.table, byName, this.mappingFunction); } @@ -903,30 +947,19 @@ public Mono then() { private FetchSpec exchange(BiFunction mappingFunction) { - if (byName.isEmpty()) { + if (this.byName.isEmpty()) { throw new IllegalStateException("Insert fields is empty!"); } - PreparedOperation operation = dataAccessStrategy.getStatements().insert(table, Collections.emptyList(), - it -> { - byName.forEach(it::bind); - }); - - String sql = getRequiredSql(operation); + StatementMapper mapper = dataAccessStrategy.getStatementMapper(); + StatementMapper.InsertSpec insert = mapper.createInsert(this.table); - Function> resultFunction = it -> { - - Statement statement = it.createStatement(sql); - operation.bindTo(new StatementWrapper(statement)); - - return Flux.from(statement.execute()); - }; + for (String column : this.byName.keySet()) { + insert = insert.withColumn(column, this.byName.get(column)); + } - return new DefaultSqlResult<>(DefaultDatabaseClient.this, // - sql, // - resultFunction, // - it -> 
resultFunction.apply(it).flatMap(Result::getRowsUpdated).next(), // - mappingFunction); + PreparedOperation operation = mapper.getMappedObject(insert); + return exchangeInsert(mappingFunction, operation); } } @@ -954,7 +987,7 @@ public TypedInsertSpec table(String tableName) { Assert.hasText(tableName, "Table name must not be null or empty!"); - return new DefaultTypedInsertSpec<>(typeToInsert, tableName, objectToInsert, this.mappingFunction); + return new DefaultTypedInsertSpec<>(this.typeToInsert, tableName, this.objectToInsert, this.mappingFunction); } @Override @@ -962,7 +995,8 @@ public InsertSpec using(T objectToInsert) { Assert.notNull(objectToInsert, "Object to insert must not be null!"); - return new DefaultTypedInsertSpec<>(typeToInsert, table, Mono.just(objectToInsert), this.mappingFunction); + return new DefaultTypedInsertSpec<>(this.typeToInsert, this.table, Mono.just(objectToInsert), + this.mappingFunction); } @Override @@ -970,7 +1004,7 @@ public InsertSpec using(Publisher objectToInsert) { Assert.notNull(objectToInsert, "Publisher to insert must not be null!"); - return new DefaultTypedInsertSpec<>(typeToInsert, table, objectToInsert, this.mappingFunction); + return new DefaultTypedInsertSpec<>(this.typeToInsert, this.table, objectToInsert, this.mappingFunction); } @Override @@ -988,7 +1022,7 @@ public FetchSpec fetch() { @Override public Mono then() { - return Mono.from(objectToInsert).flatMapMany(toInsert -> exchange(toInsert, (row, md) -> row).all()).then(); + return Mono.from(this.objectToInsert).flatMapMany(toInsert -> exchange(toInsert, (row, md) -> row).all()).then(); } private FetchSpec exchange(BiFunction mappingFunction) { @@ -1021,35 +1055,299 @@ private FetchSpec exchange(Object toInsert, BiFunction operation = dataAccessStrategy.getStatements().insert(table, Collections.emptyList(), - it -> { - outboundRow.forEach((k, v) -> { + StatementMapper mapper = dataAccessStrategy.getStatementMapper(); + StatementMapper.InsertSpec insert = 
mapper.createInsert(this.table); + + for (String column : outboundRow.keySet()) { + SettableValue settableValue = outboundRow.get(column); + if (settableValue.hasValue()) { + insert = insert.withColumn(column, settableValue); + } + } + + PreparedOperation operation = mapper.getMappedObject(insert); + return exchangeInsert(mappingFunction, operation); + } + } + + /** + * Default {@link DatabaseClient.UpdateTableSpec} implementation. + */ + class DefaultUpdateTableSpec implements UpdateTableSpec { - if (v.hasValue()) { - it.bind(k, v); - } - }); - }); + @Override + public GenericUpdateSpec table(String table) { + return new DefaultGenericUpdateSpec(null, table, null, null); + } - String sql = getRequiredSql(operation); - Function> resultFunction = it -> { + @Override + public TypedUpdateSpec table(Class table) { + return new DefaultTypedUpdateSpec<>(table, null, null); + } + } - Statement statement = it.createStatement(sql); - operation.bindTo(new StatementWrapper(statement)); - statement.returnGeneratedValues(); + @RequiredArgsConstructor + class DefaultGenericUpdateSpec implements GenericUpdateSpec, UpdateMatchingSpec { - return Flux.from(statement.execute()); - }; + private final @Nullable Class typeToUpdate; + private final @Nullable String table; + private final Update assignments; + private final Criteria where; - return new DefaultSqlResult<>(DefaultDatabaseClient.this, // - sql, // - resultFunction, // - it -> resultFunction // - .apply(it) // - .flatMap(Result::getRowsUpdated) // - .collect(Collectors.summingInt(Integer::intValue)), // - mappingFunction); + @Override + public UpdateMatchingSpec using(Update update) { + + Assert.notNull(update, "Update must not be null"); + + return new DefaultGenericUpdateSpec(this.typeToUpdate, this.table, update, this.where); + } + + @Override + public UpdateSpec matching(Criteria criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return new DefaultGenericUpdateSpec(this.typeToUpdate, this.table, 
this.assignments, criteria); + } + + @Override + public UpdatedRowsFetchSpec fetch() { + + String table; + + if (StringUtils.isEmpty(this.table)) { + table = dataAccessStrategy.getTableName(this.typeToUpdate); + } else { + table = this.table; + } + + return exchange(table); + } + + @Override + public Mono then() { + return fetch().rowsUpdated().then(); + } + + private UpdatedRowsFetchSpec exchange(String table) { + + StatementMapper mapper = dataAccessStrategy.getStatementMapper(); + + if (this.typeToUpdate != null) { + mapper = mapper.forType(this.typeToUpdate); + } + + StatementMapper.UpdateSpec update = mapper.createUpdate(table, this.assignments); + + if (this.where != null) { + update = update.withCriteria(this.where); + } + + PreparedOperation operation = mapper.getMappedObject(update); + + return exchangeUpdate(operation); + } + } + + @RequiredArgsConstructor + class DefaultTypedUpdateSpec implements TypedUpdateSpec, UpdateSpec { + + private final @Nullable Class typeToUpdate; + private final @Nullable String table; + private final T objectToUpdate; + + @Override + public UpdateSpec using(T objectToUpdate) { + + Assert.notNull(objectToUpdate, "Object to update must not be null"); + + return new DefaultTypedUpdateSpec<>(this.typeToUpdate, this.table, objectToUpdate); + } + + @Override + public TypedUpdateSpec table(String tableName) { + + Assert.hasText(tableName, "Table name must not be null or empty!"); + + return new DefaultTypedUpdateSpec<>(this.typeToUpdate, tableName, this.objectToUpdate); + } + + @Override + public UpdatedRowsFetchSpec fetch() { + + String table; + + if (StringUtils.isEmpty(this.table)) { + table = dataAccessStrategy.getTableName(this.typeToUpdate); + } else { + table = this.table; + } + + return exchange(table); + } + + @Override + public Mono then() { + return fetch().rowsUpdated().then(); + } + + private UpdatedRowsFetchSpec exchange(String table) { + + StatementMapper mapper = dataAccessStrategy.getStatementMapper(); + Map columns 
= dataAccessStrategy.getOutboundRow(this.objectToUpdate); + List ids = dataAccessStrategy.getIdentifierColumns(this.typeToUpdate); + + if (ids.isEmpty()) { + throw new IllegalStateException("No identifier columns in " + this.typeToUpdate.getName() + "!"); + } + Object id = columns.remove(ids.get(0)); // do not update the Id column. + + Update update = null; + + for (String column : columns.keySet()) { + if (update == null) { + update = Update.update(column, columns.get(column)); + } else { + update = update.set(column, columns.get(column)); + } + } + + PreparedOperation operation = mapper + .getMappedObject(mapper.createUpdate(table, update).withCriteria(Criteria.where(ids.get(0)).is(id))); + + return exchangeUpdate(operation); + } + } + + /** + * Default {@link DatabaseClient.DeleteFromSpec} implementation. + */ + class DefaultDeleteFromSpec implements DeleteFromSpec { + + @Override + public DefaultDeleteSpec from(String table) { + return new DefaultDeleteSpec<>(null, table, null); + } + + @Override + public DefaultDeleteSpec from(Class table) { + return new DefaultDeleteSpec<>(table, null, null); + } + } + + /** + * Default implementation of {@link DatabaseClient.TypedInsertSpec}. 
+ */ + @RequiredArgsConstructor + class DefaultDeleteSpec implements DeleteMatchingSpec, TypedDeleteSpec { + + private final @Nullable Class typeToDelete; + private final @Nullable String table; + private final Criteria where; + + @Override + public DeleteSpec matching(Criteria criteria) { + + Assert.notNull(criteria, "Criteria must not be null!"); + + return new DefaultDeleteSpec<>(this.typeToDelete, this.table, criteria); + } + + @Override + public TypedDeleteSpec table(String tableName) { + + Assert.hasText(tableName, "Table name must not be null or empty!"); + + return new DefaultDeleteSpec<>(this.typeToDelete, tableName, this.where); + } + + @Override + public UpdatedRowsFetchSpec fetch() { + + String table; + + if (StringUtils.isEmpty(this.table)) { + table = dataAccessStrategy.getTableName(this.typeToDelete); + } else { + table = this.table; + } + + return exchange(table); } + + @Override + public Mono then() { + return fetch().rowsUpdated().then(); + } + + private UpdatedRowsFetchSpec exchange(String table) { + + StatementMapper mapper = dataAccessStrategy.getStatementMapper(); + + if (this.typeToDelete != null) { + mapper = mapper.forType(this.typeToDelete); + } + + StatementMapper.DeleteSpec delete = mapper.createDelete(table); + + if (this.where != null) { + delete = delete.withCriteria(this.where); + } + + PreparedOperation operation = mapper.getMappedObject(delete); + + return exchangeUpdate(operation); + } + } + + private FetchSpec exchangeInsert(BiFunction mappingFunction, + PreparedOperation operation) { + + String sql = getRequiredSql(operation); + Function insertFunction = wrapPreparedOperation(sql, operation) + .andThen(statement -> statement.returnGeneratedValues()); + Function> resultFunction = it -> Flux.from(insertFunction.apply(it).execute()); + + return new DefaultSqlResult<>(this, // + sql, // + resultFunction, // + it -> sumRowsUpdated(resultFunction, it), // + mappingFunction); + } + + private UpdatedRowsFetchSpec 
exchangeUpdate(PreparedOperation operation) { + + String sql = getRequiredSql(operation); + Function executeFunction = wrapPreparedOperation(sql, operation); + Function> resultFunction = it -> Flux.from(executeFunction.apply(it).execute()); + + return new DefaultSqlResult<>(this, // + sql, // + resultFunction, // + it -> sumRowsUpdated(resultFunction, it), // + (row, rowMetadata) -> rowMetadata); + } + + private static Mono sumRowsUpdated(Function> resultFunction, Connection it) { + + return resultFunction.apply(it) // + .flatMap(Result::getRowsUpdated) // + .collect(Collectors.summingInt(Integer::intValue)); + } + + private Function wrapPreparedOperation(String sql, PreparedOperation operation) { + + return it -> { + + if (this.logger.isDebugEnabled()) { + this.logger.debug("Executing SQL statement [" + sql + "]"); + } + + Statement statement = it.createStatement(sql); + operation.bindTo(new StatementWrapper(statement)); + + return statement; + }; } private static Flux doInConnectionMany(Connection connection, Function> action) { @@ -1164,7 +1462,7 @@ Mono close() { return Mono.defer(() -> { if (compareAndSet(false, true)) { - return Mono.from(closeFunction.apply(connection)); + return Mono.from(this.closeFunction.apply(this.connection)); } return Mono.empty(); diff --git a/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java b/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java index 17f3ddbd..a7b7af1b 100644 --- a/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java +++ b/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java @@ -19,19 +19,13 @@ import io.r2dbc.spi.RowMetadata; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.OptionalLong; -import java.util.Set; import java.util.function.BiFunction; import 
java.util.function.Function; import org.springframework.dao.InvalidDataAccessResourceUsageException; import org.springframework.data.convert.CustomConversions.StoreConversions; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; -import org.springframework.data.domain.Sort.Order; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.r2dbc.dialect.ArrayColumns; import org.springframework.data.r2dbc.dialect.BindMarkersFactory; @@ -42,16 +36,11 @@ import org.springframework.data.r2dbc.function.convert.MappingR2dbcConverter; import org.springframework.data.r2dbc.function.convert.R2dbcConverter; import org.springframework.data.r2dbc.function.convert.R2dbcCustomConversions; -import org.springframework.data.r2dbc.support.StatementRenderUtil; +import org.springframework.data.r2dbc.function.query.UpdateMapper; import org.springframework.data.relational.core.mapping.RelationalMappingContext; import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; -import org.springframework.data.relational.core.sql.Expression; -import org.springframework.data.relational.core.sql.OrderByField; import org.springframework.data.relational.core.sql.Select; -import org.springframework.data.relational.core.sql.SelectBuilder.SelectFromAndOrderBy; -import org.springframework.data.relational.core.sql.StatementBuilder; -import org.springframework.data.relational.core.sql.Table; import org.springframework.data.relational.core.sql.render.NamingStrategies; import org.springframework.data.relational.core.sql.render.RenderContext; import org.springframework.data.relational.core.sql.render.RenderNamingStrategy; @@ -69,8 +58,9 @@ public class DefaultReactiveDataAccessStrategy implements ReactiveDataAccessStra private final Dialect dialect; private final R2dbcConverter converter; + private final UpdateMapper 
updateMapper; private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; - private final StatementFactory statements; + private final StatementMapper statementMapper; /** * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link Dialect}. @@ -94,14 +84,6 @@ private static R2dbcConverter createConverter(Dialect dialect) { return new MappingR2dbcConverter(context, customConversions); } - public R2dbcConverter getConverter() { - return converter; - } - - public MappingContext, ? extends RelationalPersistentProperty> getMappingContext() { - return mappingContext; - } - /** * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link Dialect} and {@link R2dbcConverter}. * @@ -115,6 +97,7 @@ public DefaultReactiveDataAccessStrategy(Dialect dialect, R2dbcConverter convert Assert.notNull(converter, "RelationalConverter must not be null"); this.converter = converter; + this.updateMapper = new UpdateMapper(converter); this.mappingContext = (MappingContext, ? 
extends RelationalPersistentProperty>) this.converter .getMappingContext(); this.dialect = dialect; @@ -141,17 +124,17 @@ public SelectRenderContext getSelect() { } }; - this.statements = new DefaultStatementFactory(this.dialect, renderContext); + this.statementMapper = new DefaultStatementMapper(dialect, renderContext, this.updateMapper, this.mappingContext); } /* * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getAllFields(java.lang.Class) + * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getAllColumns(java.lang.Class) */ @Override - public List getAllColumns(Class typeToRead) { + public List getAllColumns(Class entityType) { - RelationalPersistentEntity persistentEntity = getPersistentEntity(typeToRead); + RelationalPersistentEntity persistentEntity = getPersistentEntity(entityType); if (persistentEntity == null) { return Collections.singletonList("*"); @@ -165,6 +148,26 @@ public List getAllColumns(Class typeToRead) { return columnNames; } + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getIdentifierColumns(java.lang.Class) + */ + @Override + public List getIdentifierColumns(Class entityType) { + + RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(entityType); + + List columnNames = new ArrayList<>(); + for (RelationalPersistentProperty property : persistentEntity) { + + if (property.isIdProperty()) { + columnNames.add(property.getColumnName()); + } + } + + return columnNames; + } + /* * (non-Javadoc) * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getOutboundRow(java.lang.Object) @@ -175,7 +178,7 @@ public OutboundRow getOutboundRow(Object object) { OutboundRow row = new OutboundRow(); - converter.write(object, row); + this.converter.write(object, row); RelationalPersistentEntity entity = getRequiredPersistentEntity(ClassUtils.getUserClass(object)); @@ -198,53 +201,25 @@ private boolean 
shouldConvertArrayValue(RelationalPersistentProperty property, S private SettableValue getArrayValue(SettableValue value, RelationalPersistentProperty property) { - ArrayColumns arrayColumns = dialect.getArraySupport(); + ArrayColumns arrayColumns = this.dialect.getArraySupport(); if (!arrayColumns.isSupported()) { throw new InvalidDataAccessResourceUsageException( - "Dialect " + dialect.getClass().getName() + " does not support array columns"); + "Dialect " + this.dialect.getClass().getName() + " does not support array columns"); } - return SettableValue.fromOrEmpty(converter.getArrayValue(arrayColumns, property, value.getValue()), + return SettableValue.fromOrEmpty(this.converter.getArrayValue(arrayColumns, property, value.getValue()), property.getActualType()); } - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getMappedSort(java.lang.Class, org.springframework.data.domain.Sort) - */ - @Override - public Sort getMappedSort(Class typeToRead, Sort sort) { - - RelationalPersistentEntity entity = getPersistentEntity(typeToRead); - if (entity == null) { - return sort; - } - - List mappedOrder = new ArrayList<>(); - - for (Order order : sort) { - - RelationalPersistentProperty persistentProperty = entity.getPersistentProperty(order.getProperty()); - if (persistentProperty == null) { - mappedOrder.add(order); - } else { - mappedOrder - .add(Order.by(persistentProperty.getColumnName()).with(order.getNullHandling()).with(order.getDirection())); - } - } - - return Sort.by(mappedOrder); - } - /* * (non-Javadoc) * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getRowMapper(java.lang.Class) */ @Override public BiFunction getRowMapper(Class typeToRead) { - return new EntityRowMapper<>(typeToRead, converter); + return new EntityRowMapper<>(typeToRead, this.converter); } /* @@ -258,11 +233,11 @@ public String getTableName(Class type) { /* * (non-Javadoc) - * @see 
org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getStatements() + * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getStatementMapper() */ @Override - public StatementFactory getStatements() { - return this.statements; + public StatementMapper getStatementMapper() { + return this.statementMapper; } /* @@ -271,67 +246,27 @@ public StatementFactory getStatements() { */ @Override public BindMarkersFactory getBindMarkersFactory() { - return dialect.getBindMarkersFactory(); - } - - private RelationalPersistentEntity getRequiredPersistentEntity(Class typeToRead) { - return mappingContext.getRequiredPersistentEntity(typeToRead); - } - - @Nullable - private RelationalPersistentEntity getPersistentEntity(Class typeToRead) { - return mappingContext.getPersistentEntity(typeToRead); + return this.dialect.getBindMarkersFactory(); } /* * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#select(java.lang.String, java.util.Set, org.springframework.data.domain.Sort, org.springframework.data.domain.Pageable) + * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getConverter() */ - @Override - public String select(String tableName, Set columns, Sort sort, Pageable page) { - - Table table = Table.create(tableName); - - Collection selectList; - - if (columns.isEmpty()) { - selectList = Collections.singletonList(table.asterisk()); - } else { - selectList = table.columns(columns); - } - - SelectFromAndOrderBy selectBuilder = StatementBuilder // - .select(selectList) // - .from(tableName) // - .orderBy(createOrderByFields(table, sort)); - - OptionalLong limit = OptionalLong.empty(); - OptionalLong offset = OptionalLong.empty(); - - if (page.isPaged()) { - limit = OptionalLong.of(page.getPageSize()); - offset = OptionalLong.of(page.getOffset()); - } - - // See https://github.com/spring-projects/spring-data-r2dbc/issues/55 - return 
StatementRenderUtil.render(selectBuilder.build(), limit, offset, this.dialect); + public R2dbcConverter getConverter() { + return this.converter; } - private Collection createOrderByFields(Table table, Sort sortToUse) { - - List fields = new ArrayList<>(); - - for (Order order : sortToUse) { - - OrderByField orderByField = OrderByField.from(table.column(order.getProperty())); + public MappingContext, ? extends RelationalPersistentProperty> getMappingContext() { + return this.mappingContext; + } - if (order.getDirection() != null) { - fields.add(order.isAscending() ? orderByField.asc() : orderByField.desc()); - } else { - fields.add(orderByField); - } - } + private RelationalPersistentEntity getRequiredPersistentEntity(Class typeToRead) { + return this.mappingContext.getRequiredPersistentEntity(typeToRead); + } - return fields; + @Nullable + private RelationalPersistentEntity getPersistentEntity(Class typeToRead) { + return this.mappingContext.getPersistentEntity(typeToRead); } } diff --git a/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java b/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java index 980a6ce4..5e713db9 100644 --- a/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java +++ b/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java @@ -15,455 +15,21 @@ */ package org.springframework.data.r2dbc.function; -import io.r2dbc.spi.Statement; -import lombok.Getter; import lombok.RequiredArgsConstructor; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiConsumer; -import java.util.function.BiFunction; -import java.util.function.Consumer; - -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.data.r2dbc.dialect.BindMarker; -import 
org.springframework.data.r2dbc.dialect.BindMarkers; import org.springframework.data.r2dbc.dialect.Dialect; -import org.springframework.data.r2dbc.domain.BindTarget; -import org.springframework.data.r2dbc.domain.PreparedOperation; -import org.springframework.data.r2dbc.domain.SettableValue; -import org.springframework.data.relational.core.sql.AssignValue; -import org.springframework.data.relational.core.sql.Assignment; -import org.springframework.data.relational.core.sql.Column; -import org.springframework.data.relational.core.sql.Condition; -import org.springframework.data.relational.core.sql.Delete; -import org.springframework.data.relational.core.sql.DeleteBuilder; -import org.springframework.data.relational.core.sql.Expression; -import org.springframework.data.relational.core.sql.Insert; -import org.springframework.data.relational.core.sql.SQL; -import org.springframework.data.relational.core.sql.Select; -import org.springframework.data.relational.core.sql.SelectBuilder; -import org.springframework.data.relational.core.sql.StatementBuilder; -import org.springframework.data.relational.core.sql.Table; -import org.springframework.data.relational.core.sql.Update; -import org.springframework.data.relational.core.sql.UpdateBuilder; import org.springframework.data.relational.core.sql.render.RenderContext; -import org.springframework.data.relational.core.sql.render.SqlRenderer; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; /** * Default {@link StatementFactory} implementation. * * @author Mark Paluch */ +// TODO: Move DefaultPreparedOperation et al to a better place. Probably StatementMapper. 
@RequiredArgsConstructor -class DefaultStatementFactory implements StatementFactory { +class DefaultStatementFactory { private final Dialect dialect; private final RenderContext renderContext; - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.StatementFactory#select(java.lang.String, java.util.Collection, java.util.function.Consumer) - */ - @Override - public PreparedOperation getMappedObject(SelectSpec selectSpec, + @Nullable RelationalPersistentEntity entity) { + + Table table = Table.create(selectSpec.getTable()); + List columns = table.columns(selectSpec.getProjectedFields()); + SelectBuilder.SelectFromAndJoin selectBuilder = StatementBuilder.select(columns).from(table); + + BindMarkers bindMarkers = this.dialect.getBindMarkersFactory().create(); + Bindings bindings = Bindings.empty(); + + if (selectSpec.getCriteria() != null) { + + BoundCondition mappedObject = this.updateMapper.getMappedObject(bindMarkers, selectSpec.getCriteria(), table, + entity); + + bindings = mappedObject.getBindings(); + selectBuilder.where(mappedObject.getCondition()); + } + + if (selectSpec.getSort().isSorted()) { + + Sort mappedSort = this.updateMapper.getMappedObject(selectSpec.getSort(), entity); + selectBuilder.orderBy(createOrderByFields(table, mappedSort)); + } + + OptionalLong limit; + OptionalLong offset; + + if (selectSpec.getPage().isPaged()) { + + Pageable page = selectSpec.getPage(); + limit = OptionalLong.of(page.getPageSize()); + offset = OptionalLong.of(page.getOffset()); + } else { + limit = OptionalLong.empty(); + offset = OptionalLong.empty(); + } + + Select select = selectBuilder.build(); + return new DefaultPreparedOperation select(String tableName, Collection columnNames, - Consumer binderConsumer); - - /** - * Creates a {@link Insert} statement. - * - * @param tableName must not be {@literal null} or empty. - * @param generatedKeysNames must not be {@literal null}. - * @param binderConsumer customizer for bindings. 
Supports only - * {@link StatementBinderBuilder#bind(String, SettableValue)} bindings. - * @return the {@link PreparedOperation} to update values in {@code tableName} assigning bound values. - */ - PreparedOperation insert(String tableName, Collection generatedKeysNames, - Consumer binderConsumer); - - /** - * Creates a {@link Update} statement. - * - * @param tableName must not be {@literal null} or empty. - * @param binderConsumer customizer for bindings. - * @return the {@link PreparedOperation} to update values in {@code tableName} assigning bound values. - */ - PreparedOperation update(String tableName, Consumer binderConsumer); - - /** - * Creates a {@link Delete} statement. - * - * @param tableName must not be {@literal null} or empty. - * @param binderConsumer customizer for bindings. Supports only - * {@link StatementBinderBuilder#filterBy(String, SettableValue)} bindings. - * @return the {@link PreparedOperation} to delete rows from {@code tableName}. - */ - PreparedOperation delete(String tableName, Consumer binderConsumer); - - /** - * Binder to specify parameter bindings by name. Bindings match to equals comparisons. - */ - interface StatementBinderBuilder { - - /** - * Bind the given Id {@code value} to this builder using the underlying binding strategy to express a filter - * condition. {@link Collection} type values translate to {@code IN} matching. - * - * @param identifier named identifier that is considered by the underlying binding strategy. - * @param settable must not be {@literal null}. Use {@link SettableValue#empty(Class)} for {@code NULL} values. - */ - void filterBy(String identifier, SettableValue settable); - - /** - * Bind the given {@code value} to this builder using the underlying binding strategy. - * - * @param identifier named identifier that is considered by the underlying binding strategy. - * @param settable must not be {@literal null}. Use {@link SettableValue#empty(Class)} for {@code NULL} values. 
- */ - void bind(String identifier, SettableValue settable); - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/function/StatementMapper.java b/src/main/java/org/springframework/data/r2dbc/function/StatementMapper.java new file mode 100644 index 00000000..0f03e37c --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/StatementMapper.java @@ -0,0 +1,377 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.function; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.r2dbc.dialect.BindMarkers; +import org.springframework.data.r2dbc.dialect.Dialect; +import org.springframework.data.r2dbc.domain.PreparedOperation; +import org.springframework.data.r2dbc.domain.SettableValue; +import org.springframework.data.r2dbc.function.query.Criteria; +import org.springframework.data.r2dbc.function.query.Update; +import org.springframework.lang.Nullable; + +/** + * Mapper for statement specifications to {@link PreparedOperation}. Statement mapping applies a + * {@link Dialect}-specific transformation considering {@link BindMarkers} and vendor-specific SQL differences. 
+ * + * @author Mark Paluch + */ +public interface StatementMapper { + + /** + * Create a typed {@link StatementMapper} that considers type-specific mapping metadata. + * + * @param type must not be {@literal null}. + * @param + * @return the typed {@link StatementMapper}. + */ + TypedStatementMapper forType(Class type); + + /** + * Map a select specification to a {@link PreparedOperation}. + * + * @param selectSpec the insert operation definition, must not be {@literal null}. + * @return the {@link PreparedOperation} for {@link SelectSpec}. + */ + PreparedOperation getMappedObject(SelectSpec selectSpec); + + /** + * Map a insert specification to a {@link PreparedOperation}. + * + * @param insertSpec the insert operation definition, must not be {@literal null}. + * @return the {@link PreparedOperation} for {@link InsertSpec}. + */ + PreparedOperation getMappedObject(InsertSpec insertSpec); + + /** + * Map a update specification to a {@link PreparedOperation}. + * + * @param updateSpec the update operation definition, must not be {@literal null}. + * @return the {@link PreparedOperation} for {@link UpdateSpec}. + */ + PreparedOperation getMappedObject(UpdateSpec updateSpec); + + /** + * Map a delete specification to a {@link PreparedOperation}. + * + * @param deleteSpec the update operation definition, must not be {@literal null}. + * @return the {@link PreparedOperation} for {@link DeleteSpec}. + */ + PreparedOperation getMappedObject(DeleteSpec deleteSpec); + + /** + * Extension to {@link StatementMapper} that is associated with a type. + * + * @param + */ + interface TypedStatementMapper extends StatementMapper {} + + /** + * Create a {@code SELECT} specification for {@code table}. + * + * @param table + * @return the {@link SelectSpec}. + */ + default SelectSpec createSelect(String table) { + return SelectSpec.create(table); + } + + /** + * Create an {@code INSERT} specification for {@code table}. + * + * @param table + * @return the {@link InsertSpec}. 
+ */ + default InsertSpec createInsert(String table) { + return InsertSpec.create(table); + } + + /** + * Create an {@code UPDATE} specification for {@code table}. + * + * @param table + * @return the {@link UpdateSpec}. + */ + default UpdateSpec createUpdate(String table, Update update) { + return UpdateSpec.create(table, update); + } + + /** + * Create a {@code DELETE} specification for {@code table}. + * + * @param table + * @return the {@link DeleteSpec}. + */ + default DeleteSpec createDelete(String table) { + return DeleteSpec.create(table); + } + + /** + * {@code SELECT} specification. + */ + class SelectSpec { + + private final String table; + private final List projectedFields; + private final @Nullable Criteria criteria; + private final Sort sort; + private final Pageable page; + + protected SelectSpec(String table, List projectedFields, @Nullable Criteria criteria, Sort sort, + Pageable page) { + this.table = table; + this.projectedFields = projectedFields; + this.criteria = criteria; + this.sort = sort; + this.page = page; + } + + /** + * Create an {@code SELECT} specification for {@code table}. + * + * @param table + * @return the {@link SelectSpec}. + */ + public static SelectSpec create(String table) { + return new SelectSpec(table, Collections.emptyList(), null, Sort.unsorted(), Pageable.unpaged()); + } + + /** + * Associate {@code projectedFields} with the select and create a new {@link SelectSpec}. + * + * @param projectedFields + * @return the {@link SelectSpec}. + */ + public SelectSpec withProjection(Collection projectedFields) { + + List fields = new ArrayList<>(this.projectedFields); + fields.addAll(projectedFields); + + return new SelectSpec(this.table, fields, this.criteria, this.sort, this.page); + } + + /** + * Associate a {@link Criteria} with the select and return a new {@link SelectSpec}. + * + * @param criteria + * @return the {@link SelectSpec}. 
+ */ + public SelectSpec withCriteria(Criteria criteria) { + return new SelectSpec(this.table, this.projectedFields, criteria, this.sort, this.page); + } + + /** + * Associate {@link Sort} with the select and create a new {@link SelectSpec}. + * + * @param sort + * @return the {@link SelectSpec}. + */ + public SelectSpec withSort(Sort sort) { + return new SelectSpec(this.table, this.projectedFields, this.criteria, sort, this.page); + } + + /** + * Associate a {@link Pageable} with the select and create a new {@link SelectSpec}. + * + * @param page + * @return the {@link SelectSpec}. + */ + public SelectSpec withPage(Pageable page) { + + if (page.isPaged()) { + + Sort sort = page.getSort(); + + return new SelectSpec(this.table, this.projectedFields, this.criteria, sort.isSorted() ? sort : this.sort, + page); + } + + return new SelectSpec(this.table, this.projectedFields, this.criteria, this.sort, page); + } + + public String getTable() { + return this.table; + } + + public List getProjectedFields() { + return Collections.unmodifiableList(this.projectedFields); + } + + @Nullable + public Criteria getCriteria() { + return this.criteria; + } + + public Sort getSort() { + return this.sort; + } + + public Pageable getPage() { + return this.page; + } + } + + /** + * {@code INSERT} specification. + */ + class InsertSpec { + + private final String table; + private final Map assignments; + + protected InsertSpec(String table, Map assignments) { + this.table = table; + this.assignments = assignments; + } + + /** + * Create an {@code INSERT} specification for {@code table}. + * + * @param table + * @return the {@link InsertSpec}. + */ + public static InsertSpec create(String table) { + return new InsertSpec(table, Collections.emptyMap()); + } + + /** + * Associate a column with a {@link SettableValue} and create a new {@link InsertSpec}. + * + * @param column + * @param value + * @return the {@link InsertSpec}. 
* Create an {@code UPDATE} specification for {@code table}. + * + * @param table + * @return the {@link UpdateSpec}. + */ + public static UpdateSpec create(String table, Update update) {
+ */ + public static DeleteSpec create(String table) { + return new DeleteSpec(table, null); + } + + /** + * Associate a {@link Criteria} with the delete and return a new {@link DeleteSpec}. + * + * @param criteria + * @return the {@link DeleteSpec}. + */ + public DeleteSpec withCriteria(Criteria criteria) { + return new DeleteSpec(this.table, criteria); + } + + public String getTable() { + return this.table; + } + + @Nullable + public Criteria getCriteria() { + return this.criteria; + } + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/query/BoundAssignments.java b/src/main/java/org/springframework/data/r2dbc/function/query/BoundAssignments.java new file mode 100644 index 00000000..0a3bd3f1 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/query/BoundAssignments.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.function.query; + +import java.util.List; + +import org.springframework.data.r2dbc.dialect.Bindings; +import org.springframework.data.relational.core.sql.Assignment; +import org.springframework.util.Assert; + +/** + * Value object representing {@link Assignment}s with their {@link Bindings}. 
+ * + * @author Mark Paluch + */ +public class BoundAssignments { + + private final Bindings bindings; + + private final List assignments; + + public BoundAssignments(Bindings bindings, List assignments) { + + Assert.notNull(bindings, "Bindings must not be null!"); + Assert.notNull(assignments, "Assignments must not be null!"); + + this.bindings = bindings; + this.assignments = assignments; + } + + public Bindings getBindings() { + return bindings; + } + + public List getAssignments() { + return assignments; + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/query/BoundCondition.java b/src/main/java/org/springframework/data/r2dbc/function/query/BoundCondition.java new file mode 100644 index 00000000..a9cba313 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/query/BoundCondition.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.function.query; + +import org.springframework.data.r2dbc.dialect.Bindings; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.util.Assert; + +/** + * Value object representing a {@link Condition} with its {@link Bindings}. 
+ * + * @author Mark Paluch + */ +public class BoundCondition { + + private final Bindings bindings; + + private final Condition condition; + + public BoundCondition(Bindings bindings, Condition condition) { + + Assert.notNull(bindings, "Bindings must not be null!"); + Assert.notNull(condition, "Condition must not be null!"); + + this.bindings = bindings; + this.condition = condition; + } + + public Bindings getBindings() { + return bindings; + } + + public Condition getCondition() { + return condition; + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/query/Criteria.java b/src/main/java/org/springframework/data/r2dbc/function/query/Criteria.java new file mode 100644 index 00000000..30fa6a11 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/query/Criteria.java @@ -0,0 +1,439 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.function.query; + +import lombok.RequiredArgsConstructor; + +import java.util.Arrays; +import java.util.Collection; + +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Central class for creating queries. It follows a fluent API style so that you can easily chain together multiple + * criteria. 
Static import of the {@code Criteria.where(…)} method will improve readability as in + * {@code where("name").is(…)}.
+ * + * @param column Must not be {@literal null} or empty. + * @return a new {@link CriteriaStep} object to complete the next {@link Criteria}. + */ + public CriteriaStep or(String column) { + + Assert.hasText(column, "Column name must not be null or empty!"); + + return new DefaultCriteriaStep(column) { + @Override + protected Criteria createCriteria(Comparator comparator, Object value) { + return new Criteria(Criteria.this, Combinator.OR, column, comparator, value); + } + }; + } + + /** + * @return the previous {@link Criteria} object. Can be {@literal null} if there is no previous {@link Criteria}. + * @see #hasPrevious() + */ + @Nullable + Criteria getPrevious() { + return previous; + } + + /** + * @return {@literal true} if this {@link Criteria} has a previous one. + */ + boolean hasPrevious() { + return previous != null; + } + + /** + * @return {@link Combinator} to combine this criteria with a previous one. + */ + Combinator getCombinator() { + return combinator; + } + + /** + * @return the property name. + */ + String getColumn() { + return column; + } + + /** + * @return {@link Comparator}. + */ + Comparator getComparator() { + return comparator; + } + + /** + * @return the comparison value. Can be {@literal null}. + */ + @Nullable + Object getValue() { + return value; + } + + enum Comparator { + EQ, NEQ, LT, LTE, GT, GTE, IS_NULL, IS_NOT_NULL, LIKE, NOT_IN, IN, + } + + enum Combinator { + INITIAL, AND, OR; + } + + /** + * Interface declaring terminal builder methods to build a {@link Criteria}. + */ + public interface CriteriaStep { + + /** + * Creates a {@link Criteria} using equality. + * + * @param value + * @return + */ + Criteria is(Object value); + + /** + * Creates a {@link Criteria} using equality (is not). + * + * @param value + * @return + */ + Criteria not(Object value); + + /** + * Creates a {@link Criteria} using {@code IN}. + * + * @param values + * @return + */ + Criteria in(Object... 
values); + + /** + * Creates a {@link Criteria} using {@code IN}. + * + * @param values + * @return + */ + Criteria in(Collection values); + + /** + * Creates a {@link Criteria} using {@code NOT IN}. + * + * @param values + * @return + */ + Criteria notIn(Object... values); + + /** + * Creates a {@link Criteria} using {@code NOT IN}. + * + * @param values + * @return + */ + Criteria notIn(Collection values); + + /** + * Creates a {@link Criteria} using less-than ({@literal <}). + * + * @param value + * @return + */ + Criteria lessThan(Object value); + + /** + * Creates a {@link Criteria} using less-than or equal to ({@literal <=}). + * + * @param value + * @return + */ + Criteria lessThanOrEquals(Object value); + + /** + * Creates a {@link Criteria} using greater-than({@literal >}). + * + * @param value + * @return + */ + Criteria greaterThan(Object value); + + /** + * Creates a {@link Criteria} using greater-than or equal to ({@literal >=}). + * + * @param value + * @return + */ + Criteria greaterThanOrEquals(Object value); + + /** + * Creates a {@link Criteria} using {@code LIKE}. + * + * @param value + * @return + */ + Criteria like(Object value); + + /** + * Creates a {@link Criteria} using {@code IS NULL}. + * + * @return + */ + Criteria isNull(); + + /** + * Creates a {@link Criteria} using {@code IS NOT NULL}. + * + * @return + */ + Criteria isNotNull(); + } + + /** + * Default {@link CriteriaStep} implementation. 
+ */ + @RequiredArgsConstructor + static class DefaultCriteriaStep implements CriteriaStep { + + private final String property; + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#is(java.lang.Object) + */ + @Override + public Criteria is(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.EQ, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#not(java.lang.Object) + */ + @Override + public Criteria not(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.NEQ, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#in(java.lang.Object[]) + */ + @Override + public Criteria in(Object... values) { + + Assert.notNull(values, "Values must not be null!"); + + if (values.length > 1 && values[1] instanceof Collection) { + throw new InvalidDataAccessApiUsageException( + "You can only pass in one argument of type " + values[1].getClass().getName()); + } + + return createCriteria(Comparator.IN, Arrays.asList(values)); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#in(java.util.Collection) + */ + @Override + public Criteria in(Collection values) { + + Assert.notNull(values, "Values must not be null!"); + + return createCriteria(Comparator.IN, values); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#notIn(java.lang.Object[]) + */ + @Override + public Criteria notIn(Object... 
values) { + + Assert.notNull(values, "Values must not be null!"); + + if (values.length > 1 && values[1] instanceof Collection) { + throw new InvalidDataAccessApiUsageException( + "You can only pass in one argument of type " + values[1].getClass().getName()); + } + + return createCriteria(Comparator.NOT_IN, Arrays.asList(values)); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#notIn(java.util.Collection) + */ + @Override + public Criteria notIn(Collection values) { + + Assert.notNull(values, "Values must not be null!"); + + return createCriteria(Comparator.NOT_IN, values); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#lessThan(java.lang.Object) + */ + @Override + public Criteria lessThan(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.LT, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#lessThanOrEquals(java.lang.Object) + */ + @Override + public Criteria lessThanOrEquals(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.LTE, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#greaterThan(java.lang.Object) + */ + @Override + public Criteria greaterThan(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.GT, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#greaterThanOrEquals(java.lang.Object) + */ + @Override + public Criteria greaterThanOrEquals(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.GTE, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#like(java.lang.Object) + 
*/ + @Override + public Criteria like(Object value) { + + Assert.notNull(value, "Value must not be null!"); + + return createCriteria(Comparator.LIKE, value); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#isNull() + */ + @Override + public Criteria isNull() { + return createCriteria(Comparator.IS_NULL, null); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.r2dbc.function.query.Criteria.CriteriaStep#isNotNull() + */ + @Override + public Criteria isNotNull() { + return createCriteria(Comparator.IS_NOT_NULL, null); + } + + protected Criteria createCriteria(Comparator comparator, Object value) { + return new Criteria(property, comparator, value); + } + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/query/QueryMapper.java b/src/main/java/org/springframework/data/r2dbc/function/query/QueryMapper.java new file mode 100644 index 00000000..18229f36 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/query/QueryMapper.java @@ -0,0 +1,432 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.function.query; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.r2dbc.dialect.BindMarker; +import org.springframework.data.r2dbc.dialect.BindMarkers; +import org.springframework.data.r2dbc.dialect.Bindings; +import org.springframework.data.r2dbc.dialect.MutableBindings; +import org.springframework.data.r2dbc.domain.SettableValue; +import org.springframework.data.r2dbc.function.convert.R2dbcConverter; +import org.springframework.data.r2dbc.function.query.Criteria.Combinator; +import org.springframework.data.r2dbc.function.query.Criteria.Comparator; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.Condition; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * Maps {@link Criteria} and {@link Sort} objects considering mapping metadata and dialect-specific conversion. 
+ * + * @author Mark Paluch + */ +public class QueryMapper { + + private final R2dbcConverter converter; + private final MappingContext, RelationalPersistentProperty> mappingContext; + + /** + * Creates a new {@link QueryMapper} with the given {@link R2dbcConverter}. + * + * @param converter must not be {@literal null}. + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + public QueryMapper(R2dbcConverter converter) { + + Assert.notNull(converter, "R2dbcConverter must not be null!"); + + this.converter = converter; + this.mappingContext = (MappingContext) converter.getMappingContext(); + } + + /** + * Map the {@link Sort} object to apply field name mapping using {@link Class the type to read}. + * + * @param sort must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return + */ + public Sort getMappedObject(Sort sort, @Nullable RelationalPersistentEntity entity) { + + if (entity == null) { + return sort; + } + + List mappedOrder = new ArrayList<>(); + + for (Sort.Order order : sort) { + + RelationalPersistentProperty persistentProperty = entity.getPersistentProperty(order.getProperty()); + if (persistentProperty == null) { + mappedOrder.add(order); + } else { + mappedOrder.add( + Sort.Order.by(persistentProperty.getColumnName()).with(order.getNullHandling()).with(order.getDirection())); + } + } + + return Sort.by(mappedOrder); + } + + /** + * Map a {@link Criteria} object into {@link Condition} and consider value/{@code NULL} {@link Bindings}. + * + * @param markers bind markers object, must not be {@literal null}. + * @param criteria criteria definition to map, must not be {@literal null}. + * @param table must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return the mapped {@link BoundCondition}. 
+ */ + public BoundCondition getMappedObject(BindMarkers markers, Criteria criteria, Table table, + @Nullable RelationalPersistentEntity entity) { + + Assert.notNull(markers, "BindMarkers must not be null!"); + Assert.notNull(criteria, "Criteria must not be null!"); + Assert.notNull(table, "Table must not be null!"); + + Criteria current = criteria; + MutableBindings bindings = new MutableBindings(markers); + + // reverse unroll criteria chain + Map forwardChain = new HashMap<>(); + + while (current.hasPrevious()) { + forwardChain.put(current.getPrevious(), current); + current = current.getPrevious(); + } + + // perform the actual mapping + Condition mapped = getCondition(current, bindings, table, entity); + while (forwardChain.containsKey(current)) { + + Criteria nextCriteria = forwardChain.get(current); + + if (nextCriteria.getCombinator() == Combinator.AND) { + mapped = mapped.and(getCondition(nextCriteria, bindings, table, entity)); + } + + if (nextCriteria.getCombinator() == Combinator.OR) { + mapped = mapped.or(getCondition(nextCriteria, bindings, table, entity)); + } + + current = nextCriteria; + } + + return new BoundCondition(bindings, mapped); + } + + private Condition getCondition(Criteria criteria, MutableBindings bindings, Table table, + @Nullable RelationalPersistentEntity entity) { + + Field propertyField = createPropertyField(entity, criteria.getColumn(), this.mappingContext); + Column column = table.column(propertyField.getMappedColumnName()); + TypeInformation actualType = propertyField.getTypeHint().getRequiredActualType(); + + Object mappedValue; + Class typeHint; + + if (criteria.getValue() instanceof SettableValue) { + + SettableValue settableValue = (SettableValue) criteria.getValue(); + + mappedValue = convertValue(settableValue.getValue(), propertyField.getTypeHint()); + typeHint = getTypeHint(mappedValue, actualType.getType(), settableValue); + + } else { + + mappedValue = convertValue(criteria.getValue(), propertyField.getTypeHint()); + 
typeHint = actualType.getType(); + } + + return createCondition(column, mappedValue, typeHint, bindings, criteria.getComparator()); + } + + @Nullable + protected Object convertValue(@Nullable Object value, TypeInformation typeInformation) { + + if (value == null) { + return null; + } + + if (typeInformation.isCollectionLike()) { + this.converter.writeValue(value, typeInformation); + } else if (value instanceof Iterable) { + + List mapped = new ArrayList<>(); + + for (Object o : (Iterable) value) { + + mapped.add(this.converter.writeValue(o, typeInformation)); + } + return mapped; + } + + return this.converter.writeValue(value, typeInformation); + } + + protected MappingContext, RelationalPersistentProperty> getMappingContext() { + return this.mappingContext; + } + + private Condition createCondition(Column column, @Nullable Object mappedValue, Class valueType, + MutableBindings bindings, Comparator comparator) { + + switch (comparator) { + case IS_NULL: + return column.isNull(); + case IS_NOT_NULL: + return column.isNotNull(); + } + + if (comparator == Comparator.NOT_IN || comparator == Comparator.IN) { + + Condition condition; + if (mappedValue instanceof Iterable) { + + List expressions = new ArrayList<>( + mappedValue instanceof Collection ? 
((Collection) mappedValue).size() : 10); + + for (Object o : (Iterable) mappedValue) { + + BindMarker bindMarker = bindings.nextMarker(column.getName()); + expressions.add(bind(o, valueType, bindings, bindMarker)); + } + + condition = column.in(expressions.toArray(new Expression[0])); + + } else { + + BindMarker bindMarker = bindings.nextMarker(column.getName()); + Expression expression = bind(mappedValue, valueType, bindings, bindMarker); + + condition = column.in(expression); + } + + if (comparator == Comparator.NOT_IN) { + condition = condition.not(); + } + + return condition; + } + + BindMarker bindMarker = bindings.nextMarker(column.getName()); + Expression expression = bind(mappedValue, valueType, bindings, bindMarker); + + switch (comparator) { + case EQ: + return column.isEqualTo(expression); + case NEQ: + return column.isNotEqualTo(expression); + case LT: + return column.isLess(expression); + case LTE: + return column.isLessOrEqualTo(expression); + case GT: + return column.isGreater(expression); + case GTE: + return column.isGreaterOrEqualTo(expression); + case LIKE: + return column.like(expression); + } + + throw new UnsupportedOperationException("Comparator " + comparator + " not supported"); + } + + Field createPropertyField(@Nullable RelationalPersistentEntity entity, String key, + MappingContext, RelationalPersistentProperty> mappingContext) { + return entity == null ? 
new Field(key) : new MetadataBackedField(key, entity, mappingContext); + } + + Class getTypeHint(Object mappedValue, Class propertyType, SettableValue settableValue) { + + if (mappedValue == null || propertyType.equals(Object.class)) { + return settableValue.getType(); + } + + if (mappedValue.getClass().equals(settableValue.getValue().getClass())) { + return settableValue.getType(); + } + + return propertyType; + } + + private Expression bind(@Nullable Object mappedValue, Class valueType, MutableBindings bindings, + BindMarker bindMarker) { + + if (mappedValue != null) { + bindings.bind(bindMarker, mappedValue); + } else { + bindings.bindNull(bindMarker, valueType); + } + + return SQL.bindMarker(bindMarker.getPlaceholder()); + } + + /** + * Value object to represent a field and its meta-information. + */ + protected static class Field { + + protected final String name; + + /** + * Creates a new {@link Field} without meta-information but the given name. + * + * @param name must not be {@literal null} or empty. + */ + public Field(String name) { + + Assert.hasText(name, "Name must not be null!"); + this.name = name; + } + + /** + * Returns the key to be used in the mapped document eventually. + * + * @return + */ + public String getMappedColumnName() { + return this.name; + } + + public TypeInformation getTypeHint() { + return ClassTypeInformation.OBJECT; + } + } + + /** + * Extension of {@link Field} to be backed with mapping metadata. + */ + protected static class MetadataBackedField extends Field { + + private final RelationalPersistentEntity entity; + private final MappingContext, RelationalPersistentProperty> mappingContext; + private final RelationalPersistentProperty property; + private final @Nullable PersistentPropertyPath path; + + /** + * Creates a new {@link MetadataBackedField} with the given name, {@link RelationalPersistentEntity} and + * {@link MappingContext}. + * + * @param name must not be {@literal null} or empty. 
Assert.notNull(entity, "RelationalPersistentEntity must not be null!");
@see org.springframework.data.r2dbc.function.query.QueryMapper.Field#getTypeHint()
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.function.query; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.BiConsumer; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Class to easily construct SQL update assignments. + * + * @author Mark Paluch + */ +public class Update { + + private static final Update EMPTY = new Update(Collections.emptyMap()); + + private final Map columnsToUpdate; + + private Update(Map columnsToUpdate) { + this.columnsToUpdate = columnsToUpdate; + } + + /** + * Static factory method to create an {@link Update} using the provided column. + * + * @param column + * @param value + * @return + */ + public static Update update(String column, @Nullable Object value) { + return EMPTY.set(column, value); + } + + /** + * Update a column by assigning a value. + * + * @param column + * @param value + * @return + */ + public Update set(String column, @Nullable Object value) { + return addMultiFieldOperation(column, value); + } + + private Update addMultiFieldOperation(String key, Object value) { + + Assert.hasText(key, "Column for update must not be null or blank"); + + Map updates = new LinkedHashMap<>(this.columnsToUpdate); + updates.put(key, value); + + return new Update(updates); + } + + /** + * Performs the given action for each column-value tuple in this {@link Update object} until all entries have been + * processed or the action throws an exception. + * + * @param action must not be {@literal null}. 
+ */ + void forEachColumn(BiConsumer action) { + this.columnsToUpdate.forEach(action); + } + + public Map getAssignments() { + return Collections.unmodifiableMap(this.columnsToUpdate); + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/query/UpdateMapper.java b/src/main/java/org/springframework/data/r2dbc/function/query/UpdateMapper.java new file mode 100644 index 00000000..b28a7abf --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/query/UpdateMapper.java @@ -0,0 +1,140 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.function.query; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.springframework.data.r2dbc.dialect.BindMarker; +import org.springframework.data.r2dbc.dialect.BindMarkers; +import org.springframework.data.r2dbc.dialect.Bindings; +import org.springframework.data.r2dbc.dialect.MutableBindings; +import org.springframework.data.r2dbc.domain.SettableValue; +import org.springframework.data.r2dbc.function.convert.R2dbcConverter; +import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; +import org.springframework.data.relational.core.sql.AssignValue; +import org.springframework.data.relational.core.sql.Assignment; +import org.springframework.data.relational.core.sql.Assignments; +import org.springframework.data.relational.core.sql.Column; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.Table; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * A subclass of {@link QueryMapper} that maps {@link Update} to update assignments. + * + * @author Mark Paluch + */ +public class UpdateMapper extends QueryMapper { + + /** + * Creates a new {@link QueryMapper} with the given {@link R2dbcConverter}. + * + * @param converter must not be {@literal null}. + */ + public UpdateMapper(R2dbcConverter converter) { + super(converter); + } + + /** + * Map a {@link Update} object to {@link BoundAssignments} and consider value/{@code NULL} {@link Bindings}. + * + * @param markers bind markers object, must not be {@literal null}. + * @param update update definition to map, must not be {@literal null}. + * @param table must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return the mapped {@link BoundAssignments}. 
+ */ + public BoundAssignments getMappedObject(BindMarkers markers, Update update, Table table, + @Nullable RelationalPersistentEntity entity) { + return getMappedObject(markers, update.getAssignments(), table, entity); + } + + /** + * Map a {@code assignments} object to {@link BoundAssignments} and consider value/{@code NULL} {@link Bindings}. + * + * @param markers bind markers object, must not be {@literal null}. + * @param assignments update/insert definition to map, must not be {@literal null}. + * @param table must not be {@literal null}. + * @param entity related {@link RelationalPersistentEntity}, can be {@literal null}. + * @return the mapped {@link BoundAssignments}. + */ + public BoundAssignments getMappedObject(BindMarkers markers, Map assignments, Table table, + @Nullable RelationalPersistentEntity entity) { + + Assert.notNull(markers, "BindMarkers must not be null!"); + Assert.notNull(assignments, "Assignments must not be null!"); + Assert.notNull(table, "Table must not be null!"); + + MutableBindings bindings = new MutableBindings(markers); + List result = new ArrayList<>(); + + assignments.forEach((column, value) -> { + Assignment assignment = getAssignment(column, value, bindings, table, entity); + result.add(assignment); + }); + + return new BoundAssignments(bindings, result); + } + + private Assignment getAssignment(String columnName, Object value, MutableBindings bindings, Table table, + @Nullable RelationalPersistentEntity entity) { + + Field propertyField = createPropertyField(entity, columnName, getMappingContext()); + Column column = table.column(propertyField.getMappedColumnName()); + TypeInformation actualType = propertyField.getTypeHint().getRequiredActualType(); + + Object mappedValue; + Class typeHint; + + if (value instanceof SettableValue) { + + SettableValue settableValue = (SettableValue) value; + + mappedValue = convertValue(settableValue.getValue(), propertyField.getTypeHint()); + typeHint = getTypeHint(mappedValue, 
actualType.getType(), settableValue); + + } else { + + mappedValue = convertValue(value, propertyField.getTypeHint()); + + if (mappedValue == null) { + return Assignments.value(column, SQL.nullLiteral()); + } + + typeHint = actualType.getType(); + } + + return createAssignment(column, mappedValue, typeHint, bindings); + } + + private Assignment createAssignment(Column column, Object value, Class type, MutableBindings bindings) { + + BindMarker bindMarker = bindings.nextMarker(column.getName()); + AssignValue assignValue = Assignments.value(column, SQL.bindMarker(bindMarker.getPlaceholder())); + + if (value == null) { + bindings.bindNull(bindMarker, type); + } else { + bindings.bind(bindMarker, value); + } + + return assignValue; + } +} diff --git a/src/main/java/org/springframework/data/r2dbc/function/query/package-info.java b/src/main/java/org/springframework/data/r2dbc/function/query/package-info.java new file mode 100644 index 00000000..42e57792 --- /dev/null +++ b/src/main/java/org/springframework/data/r2dbc/function/query/package-info.java @@ -0,0 +1,6 @@ +/** + * Query and update support. 
+ */ +@org.springframework.lang.NonNullApi +@org.springframework.lang.NonNullFields +package org.springframework.data.r2dbc.function.query; diff --git a/src/main/java/org/springframework/data/r2dbc/repository/support/SimpleR2dbcRepository.java b/src/main/java/org/springframework/data/r2dbc/repository/support/SimpleR2dbcRepository.java index 347d7c6e..94f43b1f 100644 --- a/src/main/java/org/springframework/data/r2dbc/repository/support/SimpleR2dbcRepository.java +++ b/src/main/java/org/springframework/data/r2dbc/repository/support/SimpleR2dbcRepository.java @@ -21,24 +21,20 @@ import reactor.core.publisher.Mono; import java.util.Collections; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; +import java.util.List; import org.reactivestreams.Publisher; import org.springframework.data.r2dbc.domain.PreparedOperation; -import org.springframework.data.r2dbc.domain.SettableValue; import org.springframework.data.r2dbc.function.DatabaseClient; import org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy; -import org.springframework.data.r2dbc.function.StatementFactory; +import org.springframework.data.r2dbc.function.StatementMapper; import org.springframework.data.r2dbc.function.convert.R2dbcConverter; -import org.springframework.data.relational.core.sql.Delete; +import org.springframework.data.r2dbc.function.query.Criteria; import org.springframework.data.relational.core.sql.Functions; import org.springframework.data.relational.core.sql.Select; import org.springframework.data.relational.core.sql.StatementBuilder; import org.springframework.data.relational.core.sql.Table; -import org.springframework.data.relational.core.sql.Update; import org.springframework.data.relational.core.sql.render.SqlRenderer; import org.springframework.data.relational.repository.query.RelationalEntityInformation; import org.springframework.data.repository.reactive.ReactiveCrudRepository; @@ -65,27 +61,19 @@ public Mono save(S objectToSave) { 
Assert.notNull(objectToSave, "Object to save must not be null!"); - if (entity.isNew(objectToSave)) { + if (this.entity.isNew(objectToSave)) { - return databaseClient.insert() // - .into(entity.getJavaType()) // - .using(objectToSave) // - .map(converter.populateIdIfNecessary(objectToSave)) // + return this.databaseClient.insert() // + .into(this.entity.getJavaType()) // + .table(this.entity.getTableName()).using(objectToSave) // + .map(this.converter.populateIdIfNecessary(objectToSave)) // .first() // .defaultIfEmpty(objectToSave); } - Object id = entity.getRequiredId(objectToSave); - Map columns = accessStrategy.getOutboundRow(objectToSave); - columns.remove(getIdColumnName()); // do not update the Id column. - String idColumnName = getIdColumnName(); - - PreparedOperation operation = accessStrategy.getStatements().update(entity.getTableName(), binder -> { - columns.forEach(binder::bind); - binder.filterBy(idColumnName, SettableValue.from(id)); - }); - - return databaseClient.execute().sql(operation).as(entity.getJavaType()) // + return this.databaseClient.update() // + .table(this.entity.getJavaType()) // + .table(this.entity.getTableName()).using(objectToSave) // .then() // .thenReturn(objectToSave); } @@ -120,18 +108,18 @@ public Mono findById(ID id) { Assert.notNull(id, "Id must not be null!"); - Set columns = new LinkedHashSet<>(accessStrategy.getAllColumns(entity.getJavaType())); + List columns = this.accessStrategy.getAllColumns(this.entity.getJavaType()); String idColumnName = getIdColumnName(); - StatementFactory statements; + StatementMapper mapper = this.accessStrategy.getStatementMapper().forType(this.entity.getJavaType()); + StatementMapper.SelectSpec selectSpec = mapper.createSelect(this.entity.getTableName()) // + .withProjection(columns) // + .withCriteria(Criteria.where(idColumnName).is(id)); - PreparedOperation operation = accessStrategy.getStatements().select(entity.getTableName(), - Collections.singleton(idColumnName), binder -> { - 
binder.filterBy(idColumnName, SettableValue.from(id)); - }); + StatementMapper mapper = this.accessStrategy.getStatementMapper().forType(this.entity.getJavaType()); + StatementMapper.SelectSpec selectSpec = mapper.createSelect(this.entity.getTableName()) + .withProjection(Collections.singletonList(idColumnName)) // + .withCriteria(Criteria.where(idColumnName).is(id)); + + PreparedOperation operation = mapper.getMappedObject(selectSpec); - return databaseClient.execute().sql(operation) // + return this.databaseClient.execute().sql(operation) // .map((r, md) -> r) // .first() // .hasElement(); @@ -178,7 +168,7 @@ public Mono existsById(Publisher publisher) { */ @Override public Flux findAll() { - return databaseClient.select().from(entity.getJavaType()).fetch().all(); + return this.databaseClient.select().from(this.entity.getJavaType()).fetch().all(); } /* (non-Javadoc) @@ -206,15 +196,17 @@ public Flux findAllById(Publisher idPublisher) { return Flux.empty(); } - Set columns = new LinkedHashSet<>(accessStrategy.getAllColumns(entity.getJavaType())); + List columns = this.accessStrategy.getAllColumns(this.entity.getJavaType()); String idColumnName = getIdColumnName(); - PreparedOperation select = statements.select("foo", Arrays.asList("bar", "baz"), it -> {}); - - assertThat(select.getSource()).isInstanceOf(Select.class); - assertThat(select.toQuery()).isEqualTo("SELECT foo.bar, foo.baz FROM foo"); - - createBoundStatement(select, connectionMock); - - verifyZeroInteractions(statementMock); - } - - @Test - public void shouldToQuerySimpleSelectWithSimpleFilter() { - - PreparedOperation select = statements.select("foo", Arrays.asList("bar", "baz"), it -> { - it.filterBy("doe", SettableValue.from("John")); - it.filterBy("baz", SettableValue.from("Jake")); - }); - - assertThat(select.getSource()).isInstanceOf(Select.class); - assertThat(select.toQuery()).isEqualTo("SELECT foo.bar, foo.baz FROM foo WHERE foo.doe = $1 AND foo.baz = $2"); - - createBoundStatement(select, 
connectionMock); - - verify(statementMock).bind(0, "John"); - verify(statementMock).bind(1, "Jake"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldToQuerySimpleSelectWithNullFilter() { - - PreparedOperation select = statements.select("foo", Arrays.asList("bar", "baz"), it -> { - it.filterBy("doe", SettableValue.from(Arrays.asList("John", "Jake"))); - }); - - assertThat(select.getSource()).isInstanceOf(Select.class); - assertThat(select.toQuery()).isEqualTo("SELECT foo.bar, foo.baz FROM foo WHERE foo.doe IN ($1, $2)"); - - createBoundStatement(select, connectionMock); - verify(statementMock).bind(0, "John"); - verify(statementMock).bind(1, "Jake"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldFailInsertToQueryingWithoutValueBindings() { - - assertThatThrownBy(() -> statements.insert("foo", Collections.emptyList(), it -> {})) - .isInstanceOf(IllegalStateException.class); - } - - @Test - public void shouldToQuerySimpleInsert() { - - PreparedOperation insert = statements.insert("foo", Collections.emptyList(), it -> { - it.bind("bar", SettableValue.from("Foo")); - }); - - assertThat(insert.getSource()).isInstanceOf(Insert.class); - assertThat(insert.toQuery()).isEqualTo("INSERT INTO foo (bar) VALUES ($1)"); - - createBoundStatement(insert, connectionMock); - verify(statementMock).bind(0, "Foo"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldFailUpdateToQueryingWithoutValueBindings() { - - assertThatThrownBy(() -> statements.update("foo", it -> it.filterBy("foo", SettableValue.empty(Object.class)))) - .isInstanceOf(IllegalStateException.class); - } - - @Test - public void shouldToQuerySimpleUpdate() { - - PreparedOperation update = statements.update("foo", it -> { - it.bind("bar", SettableValue.from("Foo")); - }); - - assertThat(update.getSource()).isInstanceOf(Update.class); - assertThat(update.toQuery()).isEqualTo("UPDATE foo SET bar = $1"); - - createBoundStatement(update, 
connectionMock); - verify(statementMock).bind(0, "Foo"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldToQueryNullUpdate() { - - PreparedOperation update = statements.update("foo", it -> { - it.bind("bar", SettableValue.empty(String.class)); - }); - - assertThat(update.getSource()).isInstanceOf(Update.class); - assertThat(update.toQuery()).isEqualTo("UPDATE foo SET bar = $1"); - - createBoundStatement(update, connectionMock); - verify(statementMock).bindNull(0, String.class); - - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldToQueryUpdateWithFilter() { - - PreparedOperation update = statements.update("foo", it -> { - it.bind("bar", SettableValue.from("Foo")); - it.filterBy("baz", SettableValue.from("Baz")); - }); - - assertThat(update.getSource()).isInstanceOf(Update.class); - assertThat(update.toQuery()).isEqualTo("UPDATE foo SET bar = $1 WHERE foo.baz = $2"); - - createBoundStatement(update, connectionMock); - verify(statementMock).bind(0, "Foo"); - verify(statementMock).bind(1, "Baz"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldToQuerySimpleDeleteWithSimpleFilter() { - - PreparedOperation delete = statements.delete("foo", it -> { - it.filterBy("doe", SettableValue.from("John")); - }); - - assertThat(delete.getSource()).isInstanceOf(Delete.class); - assertThat(delete.toQuery()).isEqualTo("DELETE FROM foo WHERE foo.doe = $1"); - - createBoundStatement(delete, connectionMock); - verify(statementMock).bind(0, "John"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldToQuerySimpleDeleteWithMultipleFilters() { - - PreparedOperation delete = statements.delete("foo", it -> { - it.filterBy("doe", SettableValue.from("John")); - it.filterBy("baz", SettableValue.from("Jake")); - }); - - assertThat(delete.getSource()).isInstanceOf(Delete.class); - assertThat(delete.toQuery()).isEqualTo("DELETE FROM foo WHERE foo.doe = $1 AND foo.baz = $2"); - - 
createBoundStatement(delete, connectionMock); - verify(statementMock).bind(0, "John"); - verify(statementMock).bind(1, "Jake"); - verifyNoMoreInteractions(statementMock); - } - - @Test - public void shouldToQuerySimpleDeleteWithNullFilter() { - - PreparedOperation delete = statements.delete("foo", it -> { - it.filterBy("doe", SettableValue.empty(String.class)); - }); - - assertThat(delete.getSource()).isInstanceOf(Delete.class); - assertThat(delete.toQuery()).isEqualTo("DELETE FROM foo WHERE foo.doe IS NULL"); - - createBoundStatement(delete, connectionMock); - verifyZeroInteractions(statementMock); - } - - void createBoundStatement(PreparedOperation operation, Connection connection) { - - Statement statement = connection.createStatement(operation.toQuery()); - operation.bindTo(new DefaultDatabaseClient.StatementWrapper(statement)); - } -} diff --git a/src/test/java/org/springframework/data/r2dbc/function/StatementMapperUnitTests.java b/src/test/java/org/springframework/data/r2dbc/function/StatementMapperUnitTests.java new file mode 100644 index 00000000..bd5ed805 --- /dev/null +++ b/src/test/java/org/springframework/data/r2dbc/function/StatementMapperUnitTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.function; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.Test; + +import org.springframework.data.r2dbc.dialect.PostgresDialect; +import org.springframework.data.r2dbc.domain.BindTarget; +import org.springframework.data.r2dbc.domain.PreparedOperation; +import org.springframework.data.r2dbc.function.StatementMapper.UpdateSpec; +import org.springframework.data.r2dbc.function.query.Criteria; +import org.springframework.data.r2dbc.function.query.Update; + +/** + * Unit tests for {@link DefaultStatementMapper}. + * + * @author Mark Paluch + */ +public class StatementMapperUnitTests { + + ReactiveDataAccessStrategy strategy = new DefaultReactiveDataAccessStrategy(PostgresDialect.INSTANCE); + StatementMapper mapper = strategy.getStatementMapper(); + + BindTarget bindTarget = mock(BindTarget.class); + + @Test // gh-64 + public void shouldMapUpdate() { + + UpdateSpec updateSpec = mapper.createUpdate("foo", Update.update("column", "value")); + + PreparedOperation preparedOperation = mapper.getMappedObject(updateSpec); + + assertThat(preparedOperation.toQuery()).isEqualTo("UPDATE foo SET column = $1"); + + preparedOperation.bindTo(bindTarget); + verify(bindTarget).bind(0, "value"); + } + + @Test // gh-64 + public void shouldMapUpdateWithCriteria() { + + UpdateSpec updateSpec = mapper.createUpdate("foo", Update.update("column", "value")) + .withCriteria(Criteria.where("foo").is("bar")); + + PreparedOperation preparedOperation = mapper.getMappedObject(updateSpec); + + assertThat(preparedOperation.toQuery()).isEqualTo("UPDATE foo SET column = $1 WHERE foo.foo = $2"); + + preparedOperation.bindTo(bindTarget); + verify(bindTarget).bind(0, "value"); + verify(bindTarget).bind(1, "bar"); + } +} diff --git a/src/test/java/org/springframework/data/r2dbc/function/query/CriteriaUnitTests.java b/src/test/java/org/springframework/data/r2dbc/function/query/CriteriaUnitTests.java new 
file mode 100644 index 00000000..32d190c5 --- /dev/null +++ b/src/test/java/org/springframework/data/r2dbc/function/query/CriteriaUnitTests.java @@ -0,0 +1,173 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.r2dbc.function.query; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.r2dbc.function.query.Criteria.*; + +import java.util.Arrays; + +import org.junit.Test; + +import org.springframework.data.r2dbc.function.query.Criteria.*; + +/** + * Unit tests for {@link Criteria}. 
+ * + * @author Mark Paluch + */ +public class CriteriaUnitTests { + + @Test // gh-64 + public void andChainedCriteria() { + + Criteria criteria = where("foo").is("bar").and("baz").isNotNull(); + + assertThat(criteria.getColumn()).isEqualTo("baz"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.IS_NOT_NULL); + assertThat(criteria.getValue()).isNull(); + assertThat(criteria.getPrevious()).isNotNull(); + assertThat(criteria.getCombinator()).isEqualTo(Combinator.AND); + + criteria = criteria.getPrevious(); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // gh-64 + public void orChainedCriteria() { + + Criteria criteria = where("foo").is("bar").or("baz").isNotNull(); + + assertThat(criteria.getColumn()).isEqualTo("baz"); + assertThat(criteria.getCombinator()).isEqualTo(Combinator.OR); + + criteria = criteria.getPrevious(); + + assertThat(criteria.getPrevious()).isNull(); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // gh-64 + public void shouldBuildEqualsCriteria() { + + Criteria criteria = where("foo").is("bar"); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.EQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // gh-64 + public void shouldBuildNotEqualsCriteria() { + + Criteria criteria = where("foo").not("bar"); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.NEQ); + assertThat(criteria.getValue()).isEqualTo("bar"); + } + + @Test // gh-64 + public void shouldBuildInCriteria() { + + Criteria criteria = where("foo").in("bar", "baz"); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.IN); + assertThat(criteria.getValue()).isEqualTo(Arrays.asList("bar", "baz")); + } + + @Test // gh-64 + 
public void shouldBuildNotInCriteria() { + + Criteria criteria = where("foo").notIn("bar", "baz"); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.NOT_IN); + assertThat(criteria.getValue()).isEqualTo(Arrays.asList("bar", "baz")); + } + + @Test // gh-64 + public void shouldBuildGtCriteria() { + + Criteria criteria = where("foo").greaterThan(1); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.GT); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // gh-64 + public void shouldBuildGteCriteria() { + + Criteria criteria = where("foo").greaterThanOrEquals(1); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.GTE); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // gh-64 + public void shouldBuildLtCriteria() { + + Criteria criteria = where("foo").lessThan(1); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.LT); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // gh-64 + public void shouldBuildLteCriteria() { + + Criteria criteria = where("foo").lessThanOrEquals(1); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.LTE); + assertThat(criteria.getValue()).isEqualTo(1); + } + + @Test // gh-64 + public void shouldBuildLikeCriteria() { + + Criteria criteria = where("foo").like("hello%"); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.LIKE); + assertThat(criteria.getValue()).isEqualTo("hello%"); + } + + @Test // gh-64 + public void shouldBuildIsNullCriteria() { + + Criteria criteria = where("foo").isNull(); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.IS_NULL); + } + + @Test // 
gh-64 + public void shouldBuildIsNotNullCriteria() { + + Criteria criteria = where("foo").isNotNull(); + + assertThat(criteria.getColumn()).isEqualTo("foo"); + assertThat(criteria.getComparator()).isEqualTo(Comparator.IS_NOT_NULL); + } +} diff --git a/src/test/java/org/springframework/data/r2dbc/function/query/QueryMapperUnitTests.java b/src/test/java/org/springframework/data/r2dbc/function/query/QueryMapperUnitTests.java new file mode 100644 index 00000000..966ff4a9 --- /dev/null +++ b/src/test/java/org/springframework/data/r2dbc/function/query/QueryMapperUnitTests.java @@ -0,0 +1,244 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.function.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.domain.Sort.Order.*; + +import org.junit.Test; + +import org.springframework.data.domain.Sort; +import org.springframework.data.r2dbc.dialect.BindMarkersFactory; +import org.springframework.data.r2dbc.domain.BindTarget; +import org.springframework.data.r2dbc.domain.SettableValue; +import org.springframework.data.r2dbc.function.convert.MappingR2dbcConverter; +import org.springframework.data.r2dbc.function.convert.R2dbcConverter; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.sql.Table; + +/** + * Unit tests for {@link QueryMapper}. + * + * @author Mark Paluch + */ +public class QueryMapperUnitTests { + + R2dbcConverter converter = new MappingR2dbcConverter(new RelationalMappingContext()); + QueryMapper mapper = new QueryMapper(converter); + BindTarget bindTarget = mock(BindTarget.class); + + @Test // gh-64 + public void shouldMapSimpleCriteria() { + + Criteria criteria = Criteria.where("name").is("foo"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name = ?[$1]"); + + bindings.getBindings().apply(bindTarget); + verify(bindTarget).bind(0, "foo"); + } + + @Test // gh-64 + public void shouldMapSimpleNullableCriteria() { + + Criteria criteria = Criteria.where("name").is(SettableValue.empty(Integer.class)); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name = ?[$1]"); + + bindings.getBindings().apply(bindTarget); + verify(bindTarget).bindNull(0, Integer.class); + } + + @Test // gh-64 + public void shouldConsiderColumnName() { + + Criteria criteria = Criteria.where("alternative").is("foo"); + + 
BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.another_name = ?[$1]"); + } + + @Test // gh-64 + public void shouldMapAndCriteria() { + + Criteria criteria = Criteria.where("name").is("foo").and("bar").is("baz"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name = ?[$1] AND person.bar = ?[$2]"); + + bindings.getBindings().apply(bindTarget); + verify(bindTarget).bind(0, "foo"); + verify(bindTarget).bind(1, "baz"); + } + + @Test // gh-64 + public void shouldMapOrCriteria() { + + Criteria criteria = Criteria.where("name").is("foo").or("bar").is("baz"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name = ?[$1] OR person.bar = ?[$2]"); + } + + @Test // gh-64 + public void shouldMapAndOrCriteria() { + + Criteria criteria = Criteria.where("name").is("foo") // + .and("name").isNotNull() // + .or("bar").is("baz") // + .and("anotherOne").is("alternative"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo( + "person.name = ?[$1] AND person.name IS NOT NULL OR person.bar = ?[$2] AND person.anotherOne = ?[$3]"); + } + + @Test // gh-64 + public void shouldMapNeq() { + + Criteria criteria = Criteria.where("name").not("foo"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name != ?[$1]"); + } + + @Test // gh-64 + public void shouldMapIsNull() { + + Criteria criteria = Criteria.where("name").isNull(); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name IS NULL"); + } + + @Test // gh-64 + public void shouldMapIsNotNull() { + + Criteria criteria = Criteria.where("name").isNotNull(); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name 
IS NOT NULL"); + } + + @Test // gh-64 + public void shouldMapIsIn() { + + Criteria criteria = Criteria.where("name").in("a", "b", "c"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name IN (?[$1], ?[$2], ?[$3])"); + } + + @Test // gh-64 + public void shouldMapIsNotIn() { + + Criteria criteria = Criteria.where("name").notIn("a", "b", "c"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("NOT person.name IN (?[$1], ?[$2], ?[$3])"); + } + + @Test // gh-64 + public void shouldMapIsGt() { + + Criteria criteria = Criteria.where("name").greaterThan("a"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name > ?[$1]"); + } + + @Test // gh-64 + public void shouldMapIsGte() { + + Criteria criteria = Criteria.where("name").greaterThanOrEquals("a"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name >= ?[$1]"); + } + + @Test // gh-64 + public void shouldMapIsLt() { + + Criteria criteria = Criteria.where("name").lessThan("a"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name < ?[$1]"); + } + + @Test // gh-64 + public void shouldMapIsLte() { + + Criteria criteria = Criteria.where("name").lessThanOrEquals("a"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name <= ?[$1]"); + } + + @Test // gh-64 + public void shouldMapIsLike() { + + Criteria criteria = Criteria.where("name").like("a"); + + BoundCondition bindings = map(criteria); + + assertThat(bindings.getCondition().toString()).isEqualTo("person.name LIKE ?[$1]"); + } + + @Test // gh-64 + public void shouldMapSort() { + + Sort sort = Sort.by(desc("alternative")); + + Sort mapped = mapper.getMappedObject(sort, 
converter.getMappingContext().getRequiredPersistentEntity(Person.class)); + + assertThat(mapped.getOrderFor("another_name")).isEqualTo(desc("another_name")); + assertThat(mapped.getOrderFor("alternative")).isNull(); + } + + @SuppressWarnings("unchecked") + private BoundCondition map(Criteria criteria) { + + BindMarkersFactory markers = BindMarkersFactory.indexed("$", 1); + + return mapper.getMappedObject(markers.create(), criteria, Table.create("person"), + converter.getMappingContext().getRequiredPersistentEntity(Person.class)); + } + + static class Person { + + String name; + @Column("another_name") String alternative; + } +} diff --git a/src/test/java/org/springframework/data/r2dbc/function/query/UpdateMapperUnitTests.java b/src/test/java/org/springframework/data/r2dbc/function/query/UpdateMapperUnitTests.java new file mode 100644 index 00000000..9c239303 --- /dev/null +++ b/src/test/java/org/springframework/data/r2dbc/function/query/UpdateMapperUnitTests.java @@ -0,0 +1,106 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.r2dbc.function.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.Map; +import java.util.stream.Collectors; + +import org.junit.Test; + +import org.springframework.data.r2dbc.dialect.BindMarkersFactory; +import org.springframework.data.r2dbc.domain.BindTarget; +import org.springframework.data.r2dbc.domain.SettableValue; +import org.springframework.data.r2dbc.function.convert.MappingR2dbcConverter; +import org.springframework.data.r2dbc.function.convert.R2dbcConverter; +import org.springframework.data.relational.core.mapping.Column; +import org.springframework.data.relational.core.mapping.RelationalMappingContext; +import org.springframework.data.relational.core.sql.AssignValue; +import org.springframework.data.relational.core.sql.Expression; +import org.springframework.data.relational.core.sql.SQL; +import org.springframework.data.relational.core.sql.Table; + +/** + * Unit tests for {@link UpdateMapper}. 
+ * + * @author Mark Paluch + */ +public class UpdateMapperUnitTests { + + R2dbcConverter converter = new MappingR2dbcConverter(new RelationalMappingContext()); + UpdateMapper mapper = new UpdateMapper(converter); + BindTarget bindTarget = mock(BindTarget.class); + + @Test // gh-64 + public void shouldMapFieldNamesInUpdate() { + + Update update = Update.update("alternative", "foo"); + + BoundAssignments mapped = map(update); + + Map<String, Expression> assignments = mapped.getAssignments().stream().map(it -> (AssignValue) it) + .collect(Collectors.toMap(k -> k.getColumn().getName(), AssignValue::getValue)); + + assertThat(assignments).containsEntry("another_name", SQL.bindMarker("$1")); + } + + @Test // gh-64 + public void shouldUpdateToSettableValue() { + + Update update = Update.update("alternative", SettableValue.empty(String.class)); + + BoundAssignments mapped = map(update); + + Map<String, Expression> assignments = mapped.getAssignments().stream().map(it -> (AssignValue) it) + .collect(Collectors.toMap(k -> k.getColumn().getName(), AssignValue::getValue)); + + assertThat(assignments).containsEntry("another_name", SQL.bindMarker("$1")); + + mapped.getBindings().apply(bindTarget); + verify(bindTarget).bindNull(0, String.class); + } + + @Test // gh-64 + public void shouldUpdateToNull() { + + Update update = Update.update("alternative", null); + + BoundAssignments mapped = map(update); + + assertThat(mapped.getAssignments()).hasSize(1); + assertThat(mapped.getAssignments().get(0).toString()).isEqualTo("person.another_name = NULL"); + + mapped.getBindings().apply(bindTarget); + verifyZeroInteractions(bindTarget); + } + + @SuppressWarnings("unchecked") + private BoundAssignments map(Update update) { + + BindMarkersFactory markers = BindMarkersFactory.indexed("$", 1); + + return mapper.getMappedObject(markers.create(), update, Table.create("person"), + converter.getMappingContext().getRequiredPersistentEntity(Person.class)); + } + + static class Person { + + String name; + @Column("another_name") String 
alternative; + } +}