diff --git a/pom.xml b/pom.xml
index a577fa59..887d3721 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,7 +7,7 @@
org.springframework.data
spring-data-r2dbc
- 1.0.0.BUILD-SNAPSHOT
+ 1.0.0.gh-64-SNAPSHOT
Spring Data R2DBC
Spring Data module for R2DBC.
diff --git a/src/main/asciidoc/reference/r2dbc-core.adoc b/src/main/asciidoc/reference/r2dbc-core.adoc
new file mode 100644
index 00000000..87466e3e
--- /dev/null
+++ b/src/main/asciidoc/reference/r2dbc-core.adoc
@@ -0,0 +1,241 @@
+The R2DBC support contains a wide range of features:
+
+* Spring configuration support with Java-based `@Configuration` classes for an R2DBC driver instance.
+* `DatabaseClient` helper class that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs.
+* Exception translation into Spring's portable Data Access Exception hierarchy.
+* Feature-rich Object Mapping integrated with Spring's Conversion Service.
+* Annotation-based mapping metadata that is extensible to support other metadata formats.
+* Automatic implementation of Repository interfaces, including support for custom query methods.
+
+For most tasks, you should use `DatabaseClient` or the Repository support, which both leverage the rich mapping functionality.
+`DatabaseClient` is the place to look for accessing functionality such as ad-hoc CRUD operations.
+
+[[r2dbc.getting-started]]
+== Getting Started
+
+An easy way to bootstrap setting up a working environment is to create a Spring-based project through https://start.spring.io[start.spring.io].
+
+. Add the following to the pom.xml file's `dependencies` element:
++
+[source,xml,subs="+attributes"]
+----
+
+
+
+ io.r2dbc
+ r2dbc-bom
+ ${r2dbc-releasetrain.version}
+ pom
+ import
+
+
+
+
+
+
+
+
+
+ org.springframework.data
+ spring-data-r2dbc
+ {version}
+
+
+
+
+ io.r2dbc
+ r2dbc-h2
+ {r2dbcVersion}
+
+
+
+----
+. Change the version of Spring in the pom.xml to be
++
+[source,xml,subs="+attributes"]
+----
+{springVersion}
+----
+. Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level of your `<dependencies/>` element:
++
+[source,xml]
+----
+
+
+ spring-milestone
+ Spring Maven MILESTONE Repository
+ https://repo.spring.io/libs-milestone
+
+
+----
+
+The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here].
+
+You may also want to set the logging level to `DEBUG` to see some additional information. To do so, edit the `application.properties` file to have the following content:
+
+[source]
+----
+logging.level.org.springframework.data.r2dbc=DEBUG
+----
+
+Then you can create a `Person` class to persist:
+
+[source,java]
+----
+package org.spring.r2dbc.example;
+
+public class Person {
+
+ private String id;
+ private String name;
+ private int age;
+
+ public Person(String id, String name, int age) {
+ this.id = id;
+ this.name = name;
+ this.age = age;
+ }
+
+ public String getId() {
+ return id;
+ }
+ public String getName() {
+ return name;
+ }
+ public int getAge() {
+ return age;
+ }
+
+ @Override
+ public String toString() {
+ return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
+ }
+}
+----
+
+Next, you need to create a table structure in your database:
+
+[source,sql]
+----
+CREATE TABLE person
+ (id VARCHAR(255) PRIMARY KEY,
+ name VARCHAR(255),
+ age INT);
+----
+
+You also need a main application to run:
+
+[source,java]
+----
+package org.spring.r2dbc.example;
+
+public class R2dbcApp {
+
+ private static final Log log = LogFactory.getLog(R2dbcApp.class);
+
+ public static void main(String[] args) throws Exception {
+
+ ConnectionFactory connectionFactory = ConnectionFactories.get("r2dbc:h2:mem:///test?options=DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE");
+
+ DatabaseClient client = DatabaseClient.create(connectionFactory);
+
+ client.execute()
+ .sql("CREATE TABLE person" +
+ "(id VARCHAR(255) PRIMARY KEY," +
+ "name VARCHAR(255)," +
+ "age INT)")
+ .fetch()
+ .rowsUpdated()
+ .as(StepVerifier::create)
+ .expectNextCount(1)
+ .verifyComplete();
+
+ client.insert()
+ .into(Person.class)
+ .using(new Person("joe", "Joe", 34))
+ .then()
+ .as(StepVerifier::create)
+ .verifyComplete();
+
+ client.select()
+ .from(Person.class)
+ .fetch()
+ .first()
+ .doOnNext(it -> log.info(it))
+ .as(StepVerifier::create)
+ .expectNextCount(1)
+ .verifyComplete();
+ }
+}
+----
+
+When you run the main program, the preceding examples produce output similar to the following:
+
+[source]
+----
+2018-11-28 10:47:03,893 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person
+ (id VARCHAR(255) PRIMARY KEY,
+ name VARCHAR(255),
+ age INT)]
+2018-11-28 10:47:04,074 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)]
+2018-11-28 10:47:04,092 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person]
+2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34]
+----
+
+Even in this simple example, there are a few things to notice:
+
+* You can create an instance of the central helper class in Spring Data R2DBC, <<r2dbc.datbaseclient,`DatabaseClient`>>, by using a standard `io.r2dbc.spi.ConnectionFactory` object.
+* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.).
+* Mapping conventions can use field access. Notice that the `Person` class has only getters.
+* If the constructor argument names match the column names of the stored row, they are used to instantiate the object.
+
+[[r2dbc.examples-repo]]
+== Examples Repository
+
+There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works.
+
+[[r2dbc.connecting]]
+== Connecting to a Relational Database with Spring
+
+One of the first tasks when using relational databases and Spring is to create a `io.r2dbc.spi.ConnectionFactory` object using the IoC container. The following example explains Java-based configuration.
+
+[[r2dbc.connectionfactory]]
+=== Registering a `ConnectionFactory` Instance using Java-based Metadata
+
+The following example shows an example of using Java-based bean metadata to register an instance of a `io.r2dbc.spi.ConnectionFactory`:
+
+.Registering a `io.r2dbc.spi.ConnectionFactory` object using Java-based bean metadata
+====
+[source,java]
+----
+@Configuration
+public class ApplicationConfiguration extends AbstractR2dbcConfiguration {
+
+ @Override
+ @Bean
+ public ConnectionFactory connectionFactory() {
+ return …;
+ }
+}
+----
+====
+
+This approach lets you use the standard `io.r2dbc.spi.ConnectionFactory` instance, with the container using Spring's `AbstractR2dbcConfiguration`. As compared to registering a `ConnectionFactory` instance directly, the configuration support has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates R2DBC exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and the use of `@Repository` is described in https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html[Spring's DAO support features].
+
+`AbstractR2dbcConfiguration` also registers `DatabaseClient`, which is required for database interaction and for Repository implementation.
+
+[[r2dbc.drivers]]
+=== R2DBC Drivers
+
+Spring Data R2DBC supports drivers by R2DBC's pluggable SPI mechanism. Any driver implementing the R2DBC spec can be used with Spring Data R2DBC.
+R2DBC is a relatively young initiative that gains significance by maturing through adoption.
+As of writing the following drivers are available:
+
+* https://github.com/r2dbc/r2dbc-postgresql[Postgres] (`io.r2dbc:r2dbc-postgresql`)
+* https://github.com/r2dbc/r2dbc-h2[H2] (`io.r2dbc:r2dbc-h2`)
+* https://github.com/r2dbc/r2dbc-mssql[Microsoft SQL Server] (`io.r2dbc:r2dbc-mssql`)
+* https://github.com/jasync-sql/jasync-sql[MySQL] (`com.github.jasync-sql:jasync-r2dbc-mysql`)
+
+Spring Data R2DBC reacts to database specifics by inspecting `ConnectionFactoryMetadata` and selects the appropriate database dialect.
+You can configure an own `Dialect` if the used driver is not yet known to Spring Data R2DBC.
+
diff --git a/src/main/asciidoc/reference/r2dbc-databaseclient.adoc b/src/main/asciidoc/reference/r2dbc-databaseclient.adoc
new file mode 100644
index 00000000..5cbb8652
--- /dev/null
+++ b/src/main/asciidoc/reference/r2dbc-databaseclient.adoc
@@ -0,0 +1,111 @@
+[[r2dbc.datbaseclient]]
+= Introduction to `DatabaseClient`
+
+Spring Data R2DBC includes a reactive, non-blocking `DatabaseClient` for database interaction. The client has a functional, fluent API with reactive types for declarative composition.
+`DatabaseClient` encapsulates resource handling such as opening and closing connections so your application code can make use of executing SQL queries or calling higher-level functionality such as inserting or selecting data.
+
+NOTE: `DatabaseClient` is a young application component providing a minimal set of convenience methods that is likely to be extended through time.
+
+NOTE: Once configured, `DatabaseClient` is thread-safe and can be reused across multiple threads.
+
+Another central feature of `DatabaseClient` is translation of exceptions thrown by R2DBC drivers into Spring's portable Data Access Exception hierarchy. See "`<<r2dbc.exception,Exception Translation>>`" for more information.
+
+The next section contains an example of how to work with the `DatabaseClient` in the context of the Spring container.
+
+[[r2dbc.datbaseclient.create]]
+== Creating `DatabaseClient`
+
+The simplest way to create a `DatabaseClient` is through a static factory method:
+
+[source,java]
+----
+DatabaseClient.create(ConnectionFactory connectionFactory)
+----
+
+The above method creates a `DatabaseClient` with default settings.
+
+You can also use `DatabaseClient.builder()` with further options to customize the client:
+
+* `exceptionTranslator`: Supply a specific `R2dbcExceptionTranslator` to customize how R2DBC exceptions are translated into Spring's portable Data Access Exception hierarchy. See "`<<r2dbc.exception,Exception Translation>>`" for more information.
+* `dataAccessStrategy`: Strategy how SQL queries are generated and how objects are mapped.
+
+Once built, a `DatabaseClient` instance is immutable. However, you can clone it and build a modified copy without affecting the original instance, as the following example shows:
+
+[source,java]
+----
+DatabaseClient client1 = DatabaseClient.builder()
+ .exceptionTranslator(exceptionTranslatorA).build();
+
+DatabaseClient client2 = client1.mutate()
+ .exceptionTranslator(exceptionTranslatorB).build();
+----
+
+== Controlling Database Connections
+
+Spring Data R2DBC obtains a connection to the database through a `ConnectionFactory`.
+A `ConnectionFactory` is part of the R2DBC specification and is a generalized connection factory.
+It lets a container or a framework hide connection pooling and transaction management issues from the application code.
+
+When you use Spring Data R2DBC, you can create a `ConnectionFactory` using your R2DBC driver.
+`ConnectionFactory` implementations can either return the same connection, different connections or provide connection pooling.
+`DatabaseClient` uses `ConnectionFactory` to create and release connections per operation without affinity to a particular connection across multiple operations.
+
+[[r2dbc.exception]]
+= Exception Translation
+
+The Spring framework provides exception translation for a wide variety of database and mapping technologies.
+The Spring support for R2DBC extends this feature by providing implementations of the `R2dbcExceptionTranslator` interface.
+
+`R2dbcExceptionTranslator` is an interface to be implemented by classes that can translate between `R2dbcException` and Spring’s own `org.springframework.dao.DataAccessException`, which is agnostic in regard to data access strategy.
+Implementations can be generic (for example, using SQLState codes) or proprietary (for example, using Postgres error codes) for greater precision.
+
+`R2dbcExceptionSubclassTranslator` is the implementation of `R2dbcExceptionTranslator` that is used by default.
+It considers R2DBC's categorized exception hierarchy to translate these into Spring's consistent exception hierarchy.
+`R2dbcExceptionSubclassTranslator` uses `SqlStateR2dbcExceptionTranslator` as fallback if it is not able to translate an exception.
+
+`SqlErrorCodeR2dbcExceptionTranslator` uses specific vendor codes using Spring JDBC's `SQLErrorCodes`.
+It is more precise than the SQLState implementation.
+The error code translations are based on codes held in a JavaBean type class called `SQLErrorCodes`.
+This class is created and populated by an `SQLErrorCodesFactory`, which (as the name suggests) is a factory for creating SQLErrorCodes based on the contents of a configuration file named `sql-error-codes.xml` from Spring's Data Access module.
+This file is populated with vendor codes and based on the `ConnectionFactoryName` taken from `ConnectionFactoryMetadata`.
+The codes for the actual database you are using are used.
+
+The `SqlErrorCodeR2dbcExceptionTranslator` applies matching rules in the following sequence:
+
+1. Any custom translation implemented by a subclass. Normally, the provided concrete `SqlErrorCodeR2dbcExceptionTranslator` is used, so this rule does not apply. It applies only if you have actually provided a subclass implementation.
+2. Any custom implementation of the `SQLExceptionTranslator` interface that is provided as the `customSqlExceptionTranslator` property of the `SQLErrorCodes` class.
+3. Error code matching is applied.
+4. Use a fallback translator.
+
+NOTE: The `SQLErrorCodesFactory` is used by default to define Error codes and custom exception translations. They are looked up in a file named `sql-error-codes.xml` from the classpath, and the matching `SQLErrorCodes` instance is located based on the database name from the database metadata of the database in use. `SQLErrorCodesFactory` requires Spring JDBC.
+
+You can extend `SqlErrorCodeR2dbcExceptionTranslator`, as the following example shows:
+
+[source,java]
+----
+public class CustomSqlErrorCodeR2dbcExceptionTranslator extends SqlErrorCodeR2dbcExceptionTranslator {
+
+ protected DataAccessException customTranslate(String task, String sql, R2dbcException r2dbcex) {
+ if (r2dbcex.getErrorCode() == -12345) {
+ return new DeadlockLoserDataAccessException(task, r2dbcex);
+ }
+ return null;
+ }
+}
+----
+
+In the preceding example, the specific error code (`-12345`) is translated, while other errors are left to be translated by the default translator implementation.
+To use this custom translator, you must configure `DatabaseClient` through the builder method `exceptionTranslator`, and you must use this `DatabaseClient` for all of the data access processing where this translator is needed.
+The following example shows how you can use this custom translator:
+
+[source,java]
+----
+ConnectionFactory connectionFactory = …;
+
+CustomSqlErrorCodeR2dbcExceptionTranslator exceptionTranslator = new CustomSqlErrorCodeR2dbcExceptionTranslator();
+
+DatabaseClient client = DatabaseClient.builder()
+ .connectionFactory(connectionFactory)
+ .exceptionTranslator(exceptionTranslator)
+ .build();
+----
diff --git a/src/main/asciidoc/reference/r2dbc-fluent.adoc b/src/main/asciidoc/reference/r2dbc-fluent.adoc
new file mode 100644
index 00000000..7ac0eadd
--- /dev/null
+++ b/src/main/asciidoc/reference/r2dbc-fluent.adoc
@@ -0,0 +1,239 @@
+[[r2dbc.datbaseclient.fluent-api]]
+= Fluent Data Access API
+
+You have already seen ``DatabaseClient``s SQL API that offers you maximum flexibility to execute any type of SQL.
+`DatabaseClient` provides a more narrow interface for typical ad-hoc use-cases such as querying, inserting, updating, and deleting data.
+
+The entry points (`insert()`, `select()`, `update()`, and others) follow a natural naming schema based on the operation to be run. Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that creates and runs a SQL statement. Spring Data R2DBC uses a `Dialect` abstraction to determine bind markers, pagination support and data types natively supported by the underlying driver.
+
+Let's take a look at a simple query:
+
+====
+[source,java]
+----
+Flux people = databaseClient.select()
+ .from(Person.class) <1>
+ .fetch()
+ .all(); <2>
+----
+<1> Using `Person` with the `from(…)` method sets the `FROM` table based on mapping metadata. It also maps tabular results on `Person` result objects.
+<2> Fetching `all()` rows returns a `Flux` without limiting results.
+====
+
+The following example declares a more complex query that specifies the table name by name, a `WHERE` condition and `ORDER BY` clause:
+
+====
+[source,java]
+----
+Mono first = databaseClient.select()
+ .from("legoset") <1>
+ .matching(where("firstname").is("John") <2>
+ .and("lastname").in("Doe", "White"))
+ .orderBy(desc("id")) <3>
+ .as(Person.class)
+ .fetch()
+ .one(); <4>
+----
+<1> Selecting from a table by name returns row results as `Map` with case-insensitive column name matching.
+<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter results.
+<3> Results can be ordered by individual column names resulting in an `ORDER BY` clause.
+<4> Selecting the one result fetches just a single row. This way of consuming rows expects the query to return exactly a single result. `Mono` emits an `IncorrectResultSizeDataAccessException` if the query yields more than a single result.
+====
+
+You can consume Query results in three ways:
+
+* Through object mapping (e.g. `as(Class)`) using Spring Data's mapping-metadata.
+* As `Map` where column names are mapped to their value. Column names are looked up case-insensitive.
+* By supplying a mapping `BiFunction` for direct access to R2DBC `Row` and `RowMetadata`
+
+You can switch between retrieving a single entity and retrieving multiple entities through the terminating methods:
+
+* `first()`: Consume only the first row returning a `Mono`. The returned `Mono` completes without emitting an object if the query returns no results.
+* `one()`: Consume exactly one row returning a `Mono`. The returned `Mono` completes without emitting an object if the query returns no results. If the query returns more than one row then `Mono` completes exceptionally emitting `IncorrectResultSizeDataAccessException`.
+* `all()`: Consume all returned rows returning a `Flux`.
+* `rowsUpdated()`: Consume the number of affected rows. Typically used with `INSERT`/`UPDATE`/`DELETE` statements.
+
+[[r2dbc.datbaseclient.fluent-api.select]]
+== Selecting Data
+
+Use the `select()` entry point to express your `SELECT` queries.
+The resulting `SELECT` queries support the commonly used clauses `WHERE`, `ORDER BY` and support pagination.
+The fluent API style allows you to chain together multiple methods while having easy-to-understand code.
+To improve readability, use static imports that allow you to avoid using the 'new' keyword for creating `Criteria` instances.
+
+[[r2dbc.datbaseclient.fluent-api.criteria]]
+==== Methods for the Criteria Class
+
+The `Criteria` class provides the following methods, all of which correspond to SQL operators:
+
+* `Criteria` *and* `(String column)` Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
+* `Criteria` *or* `(String column)` Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one.
+* `Criteria` *greaterThan* `(Object o)` Creates a criterion using the `>` operator.
+* `Criteria` *greaterThanOrEquals* `(Object o)` Creates a criterion using the `>=` operator.
+* `Criteria` *in* `(Object... o)` Creates a criterion using the `IN` operator for a varargs argument.
+* `Criteria` *in* `(Collection> collection)` Creates a criterion using the `IN` operator using a collection.
+* `Criteria` *is* `(Object o)` Creates a criterion using column matching (`property = value`).
+* `Criteria` *isNull* `()` Creates a criterion using the `IS NULL` operator.
+* `Criteria` *isNotNull* `()` Creates a criterion using the `IS NOT NULL` operator.
+* `Criteria` *lessThan* `(Object o)` Creates a criterion using the `<` operator.
+* `Criteria` *lessThanOrEquals* `(Object o)` Creates a criterion using the `<=` operator.
+* `Criteria` *like* `(Object o)` Creates a criterion using the `LIKE` operator without escape character processing.
+* `Criteria` *not* `(Object o)` Creates a criterion using the `!=` operator.
+* `Criteria` *notIn* `(Object... o)` Creates a criterion using the `NOT IN` operator for a varargs argument.
+* `Criteria` *notIn* `(Collection> collection)` Creates a criterion using the `NOT IN` operator using a collection.
+
+You can use `Criteria` with `SELECT`, `UPDATE`, and `DELETE` queries.
+
+[[r2dbc.datbaseclient.fluent-api.select.methods]]
+==== Methods for SELECT operations
+
+The `select()` entry point exposes some additional methods that provide options for the query:
+
+* *from* `(Class)` used to specify the source table using a mapped object. Returns results by default as `T`.
+* *from* `(String)` used to specify the source table name. Returns results by default as `Map`.
+* *as* `(Class)` used to map results to `T`.
+* *map* `(BiFunction)` used to supply a mapping function to extract results.
+* *project* `(String... columns)` used to specify which columns to return.
+* *matching* `(Criteria)` used to declare a `WHERE` condition to filter results.
+* *orderBy* `(Order)` used to declare a `ORDER BY` clause to sort results.
+* *page* `(Pageable pageable)` used to retrieve a particular page within the result. Limits the size of the returned results and reads from an offset.
+* *fetch* `()` transition call declaration to the fetch stage to declare result consumption multiplicity.
+
+[[r2dbc.datbaseclient.fluent-api.insert]]
+== Inserting Data
+
+Use the `insert()` entry point to insert data. Similar to `select()`, `insert()` allows free-form and mapped object inserts.
+
+Take a look at a simple typed insert operation:
+
+====
+[source,java]
+----
+Mono insert = databaseClient.insert()
+ .into(Person.class) <1>
+ .using(new Person(…)) <2>
+ .then(); <3>
+----
+<1> Using `Person` with the `into(…)` method sets the `INTO` table based on mapping metadata. It also prepares the insert statement to accept `Person` objects for inserting.
+<2> Provide a scalar `Person` object. Alternatively, you can supply a `Publisher` to execute a stream of `INSERT` statements. This method extracts all non-``null`` values and inserts these.
+<3> Use `then()` to just insert an object without consuming further details. Modifying statements allow consumption of the number of affected rows or tabular results for consuming generated keys.
+====
+
+Inserts also support untyped operations:
+
+====
+[source,java]
+----
+Mono insert = databaseClient.insert()
+ .into("person") <1>
+ .value("firstname", "John") <2>
+ .nullValue("lastname") <3>
+ .then(); <4>
+----
+<1> Start an insert into the `person` table.
+<2> Provide a non-null value for `firstname`.
+<3> Set `lastname` to `null`.
+<4> Use `then()` to just insert an object without consuming further details. Modifying statements allow consumption of the number of affected rows or tabular results for consuming generated keys.
+====
+
+[[r2dbc.datbaseclient.fluent-api.insert.methods]]
+==== Methods for INSERT operations
+
+The `insert()` entry point exposes some additional methods that provide options for the operation:
+
+* *into* `(Class)` used to specify the target table using a mapped object. Returns results by default as `T`.
+* *into* `(String)` used to specify the target table name. Returns results by default as `Map`.
+* *using* `(T)` used to specify the object to insert.
+* *using* `(Publisher)` used to accept a stream of objects to insert.
+* *table* `(String)` used to override the target table name.
+* *value* `(String, Object)` used to provide a column value to insert.
+* *nullValue* `(String)` used to provide a null value to insert.
+* *map* `(BiFunction)` used to supply a mapping function to extract results.
+* *then* `()` execute `INSERT` without consuming any results.
+* *fetch* `()` transition call declaration to the fetch stage to declare result consumption multiplicity.
+
+[[r2dbc.datbaseclient.fluent-api.update]]
+== Updating Data
+
+Use the `update()` entry point to update rows.
+Updating data starts with a specification of the table to update accepting `Update` specifying assignments. It also accepts `Criteria` to create a `WHERE` clause.
+
+Take a look at a simple typed update operation:
+
+====
+[source,java]
+----
+Person modified = …
+
+Mono update = databaseClient.update()
+ .table(Person.class) <1>
+ .using(modified) <2>
+ .then(); <3>
+----
+<1> Using `Person` with the `table(…)` method sets the table to update based on mapping metadata.
+<2> Provide a scalar `Person` object value. `using(…)` accepts the modified object and derives primary keys and updates all column values.
+<3> Use `then()` to just update rows without consuming further details. Modifying statements also allow consumption of the number of affected rows.
+====
+
+Updates also support untyped operations:
+
+====
+[source,java]
+----
+Mono update = databaseClient.update()
+ .table("person") <1>
+ .using(Update.update("firstname", "Jane")) <2>
+ .matching(where("firstname").is("John")) <3>
+ .then(); <4>
+----
+<1> Update table `person`.
+<2> Provide an `Update` definition that specifies which columns to update.
+<3> The issued query declares a `WHERE` condition on `firstname` columns to filter rows to update.
+<4> Use `then()` to just update rows without consuming further details. Modifying statements also allow consumption of the number of affected rows.
+====
+
+[[r2dbc.datbaseclient.fluent-api.update.methods]]
+==== Methods for UPDATE operations
+
+The `update()` entry point exposes some additional methods that provide options for the operation:
+
+* *table* `(Class)` used to specify the target table using a mapped object. Returns results by default as `T`.
+* *table* `(String)` used to specify the target table name. Returns results by default as `Map`.
+* *using* `(T)` used to specify the object to update. Derives criteria itself.
+* *using* `(Update)` used to specify the update definition.
+* *matching* `(Criteria)` used to declare a `WHERE` condition to rows to update.
+* *then* `()` execute `UPDATE` without consuming any results.
+* *fetch* `()` transition call declaration to the fetch stage to fetch the number of updated rows.
+
+[[r2dbc.datbaseclient.fluent-api.delete]]
+== Deleting Data
+
+Use the `delete()` entry point to delete rows.
+Removing data starts with a specification of the table to delete from and optionally accepts a `Criteria` to create a `WHERE` clause.
+
+Take a look at a simple delete operation:
+
+====
+[source,java]
+----
+Mono delete = databaseClient.delete()
+ .from(Person.class) <1>
+ .matching(where("firstname").is("John") <2>
+ .and("lastname").in("Doe", "White"))
+ .then(); <3>
+----
+<1> Using `Person` with the `from(…)` method sets the `FROM` table based on mapping metadata.
+<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter rows to delete.
+<3> Use `then()` to just delete rows without consuming further details. Modifying statements also allow consumption of the number of affected rows.
+====
+
+[[r2dbc.datbaseclient.fluent-api.delete.methods]]
+==== Methods for DELETE operations
+
+The `delete()` entry point exposes some additional methods that provide options for the operation:
+
+* *from* `(Class)` used to specify the target table using a mapped object. Returns results by default as `T`.
+* *from* `(String)` used to specify the target table name. Returns results by default as `Map`.
+* *matching* `(Criteria)` used to declare a `WHERE` condition to rows to delete.
+* *then* `()` execute `DELETE` without consuming any results.
+* *fetch* `()` transition call declaration to the fetch stage to fetch the number of deleted rows.
diff --git a/src/main/asciidoc/reference/r2dbc-sql.adoc b/src/main/asciidoc/reference/r2dbc-sql.adoc
new file mode 100644
index 00000000..eb824d08
--- /dev/null
+++ b/src/main/asciidoc/reference/r2dbc-sql.adoc
@@ -0,0 +1,163 @@
+[[r2dbc.datbaseclient.statements]]
+= Executing Statements
+
+Running a statement is the basic functionality that is covered by `DatabaseClient`.
+The following example shows what you need to include for minimal but fully functional code that creates a new table:
+
+[source,java]
+----
+Mono<Void> completion = client.execute()
+ .sql("CREATE TABLE person (id VARCHAR(255) PRIMARY KEY, name VARCHAR(255), age INTEGER);")
+ .then();
+----
+
+`DatabaseClient` is designed for a convenient fluent usage.
+It exposes intermediate, continuation, and terminal methods at each stage of the execution specification.
+The example above uses `then()` to return a completion `Publisher` that completes as soon as the query (or queries, if the SQL query contains multiple statements) completes.
+
+NOTE: `execute().sql(…)` accepts either the SQL query string or a query `Supplier` to defer the actual query creation until execution.
+
+[[r2dbc.datbaseclient.queries]]
+== Running Queries
+
+SQL queries can return values or the number of affected rows.
+`DatabaseClient` can return the number of updated rows or the rows themselves, depending on the issued query.
+
+The following example shows an `UPDATE` statement that returns the number of updated rows:
+
+[source,java]
+----
+Mono<Integer> affectedRows = client.execute()
+ .sql("UPDATE person SET name = 'Joe'")
+ .fetch().rowsUpdated();
+----
+
+Running a `SELECT` query returns a different type of result, in particular tabular results. Tabular data is typically consumed by streaming each `Row`.
+You might have noticed the use of `fetch()` in the previous example.
+`fetch()` is a continuation operator that allows you to specify how much data you want to consume.
+
+[source,java]
+----
+Mono<Map<String, Object>> first = client.execute()
+ .sql("SELECT id, name FROM person")
+ .fetch().first();
+----
+
+Calling `first()` returns the first row from the result and discards remaining rows.
+You can consume data with the following operators:
+
+* `first()` return the first row of the entire result
+* `one()` returns exactly one result and fails if the result contains more rows.
+* `all()` returns all rows of the result
+* `rowsUpdated()` returns the number of affected rows (`INSERT` count, `UPDATE` count)
+
+`DatabaseClient` queries return their results by default as `Map<String, Object>` of column name to value. You can customize type mapping by applying an `as(Class<T>)` operator.
+
+[source,java]
+----
+Flux<Person> all = client.execute()
+ .sql("SELECT id, name FROM mytable")
+ .as(Person.class)
+ .fetch().all();
+----
+
+`as(…)` applies <<mapping>> and maps the resulting columns to your POJO.
+
+[[r2dbc.datbaseclient.mapping]]
+== Mapping Results
+
+You can customize result extraction beyond `Map` and POJO result extraction by providing an extractor `BiFunction`.
+The extractor function interacts directly with R2DBC's `Row` and `RowMetadata` objects and can return arbitrary values (singular values, collections/maps, objects).
+
+The following example extracts the `id` column and emits its value:
+
+[source,java]
+----
+Flux<String> names = client.execute()
+ .sql("SELECT name FROM person")
+ .map((row, rowMetadata) -> row.get("id", String.class))
+ .all();
+----
+
+[[r2dbc.datbaseclient.mapping.null]]
+.What about `null`?
+****
+Relational database results may contain `null` values.
+Reactive Streams forbids emission of `null` values which requires a proper `null` handling in the extractor function.
+While you can obtain `null` values from a `Row`, you must not emit a `null` value.
+You must wrap any `null` values in an object (e.g. `Optional` for singular values) to make sure a `null` value is never returned directly by your extractor function.
+****
+
+[[r2dbc.datbaseclient.binding]]
+== Binding Values to Queries
+
+A typical application requires parameterized SQL statements to select or update rows according to some input.
+These are typically `SELECT` statements constrained by a `WHERE` clause or `INSERT`/`UPDATE` statements accepting input parameters.
+Parameterized statements bear the risk of SQL injection if parameters are not escaped properly.
+`DatabaseClient` leverages R2DBC's Bind API to eliminate the risk of SQL injection for query parameters.
+You can provide a parameterized SQL statement with the `sql(…)` operator and bind parameters to the actual `Statement`.
+Your R2DBC driver then executes the statement using prepared statements and parameter substitution.
+
+Parameter binding supports various binding strategies:
+
+* By Index using zero-based parameter indexes.
+* By Name using the placeholder name.
+
+The following example shows parameter binding for a query:
+
+[source,java]
+----
+db.execute()
+ .sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
+ .bind("id", "joe")
+ .bind("name", "Joe")
+ .bind("age", 34);
+----
+
+.R2DBC Native Bind Markers
+****
+R2DBC uses database-native bind markers that depend on the actual database vendor.
+As an example, Postgres uses indexed markers such as `$1`, `$2`, `$n`.
+Another example is SQL Server that uses named bind markers prefixed with `@` (at).
+
+This is different from JDBC which requires `?` (question mark) as bind markers.
+In JDBC, the actual drivers translate question mark bind markers to database-native markers as part of their statement execution.
+
+Spring Data R2DBC allows you to use native bind markers or named bind markers with the `:name` syntax.
+
+Named parameter support leverages ``Dialect``s to expand named parameters to native bind markers at the time of query execution which gives you a certain degree of query portability across various database vendors.
+****
+
+The query-preprocessor unrolls named `Collection` parameters into a series of bind markers to remove the need of dynamic query creation based on the number of arguments.
+Nested object arrays are expanded to allow usage of e.g. select lists.
+
+Consider the following query:
+
+[source,sql]
+----
+SELECT id, name, state FROM table WHERE (name, age) IN (('John', 35), ('Ann', 50))
+----
+
+This query can be parametrized and executed as:
+
+[source,java]
+----
+List<Object[]> tuples = new ArrayList<>();
+tuples.add(new Object[] {"John", 35});
+tuples.add(new Object[] {"Ann", 50});
+
+db.execute()
+ .sql("SELECT id, name, state FROM table WHERE (name, age) IN (:tuples)")
+ .bind("tuples", tuples);
+----
+
+NOTE: Usage of select lists is vendor-dependent.
+
+A simpler variant using `IN` predicates:
+
+[source,java]
+----
+db.execute()
+ .sql("SELECT id, name, state FROM table WHERE age IN (:ages)")
+ .bind("ages", Arrays.asList(35, 50));
+----
diff --git a/src/main/asciidoc/reference/r2dbc-transactions.adoc b/src/main/asciidoc/reference/r2dbc-transactions.adoc
new file mode 100644
index 00000000..f85db46f
--- /dev/null
+++ b/src/main/asciidoc/reference/r2dbc-transactions.adoc
@@ -0,0 +1,29 @@
+[[r2dbc.datbaseclient.transactions]]
+= Transactions
+
+A common pattern when using relational databases is grouping multiple queries within a unit of work that is guarded by a transaction.
+Relational databases typically associate a transaction with a single transport connection.
+Using different connections hence results in utilizing different transactions.
+Spring Data R2DBC includes a transactional `DatabaseClient` implementation with `TransactionalDatabaseClient` that allows you to group multiple statements within the same transaction.
+`TransactionalDatabaseClient` is an extension of `DatabaseClient` that exposes the same functionality as `DatabaseClient` and adds transaction-management methods.
+
+You can run multiple statements within a transaction using the `inTransaction(Function)` closure:
+
+[source,java]
+----
+TransactionalDatabaseClient databaseClient = TransactionalDatabaseClient.create(connectionFactory);
+
+Flux<Void> completion = databaseClient.inTransaction(db -> {
+
+ return db.execute().sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
+ .bind("id", "joe")
+ .bind("name", "Joe")
+ .bind("age", 34)
+ .fetch().rowsUpdated()
+ .then(db.execute().sql("INSERT INTO contacts (id, name) VALUES(:id, :name)")
+ .bind("id", "joe")
+ .bind("name", "Joe")
+ .fetch().rowsUpdated())
+ .then();
+});
+----
diff --git a/src/main/asciidoc/reference/r2dbc.adoc b/src/main/asciidoc/reference/r2dbc.adoc
index 27802ce5..27e5f3e1 100644
--- a/src/main/asciidoc/reference/r2dbc.adoc
+++ b/src/main/asciidoc/reference/r2dbc.adoc
@@ -1,536 +1,12 @@
[[r2dbc.core]]
= R2DBC support
-The R2DBC support contains a wide range of features:
+include::r2dbc-core.adoc[]
-* Spring configuration support with Java-based `@Configuration` classes for an R2DBC driver instance.
-* `DatabaseClient` helper class that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs.
-* Exception translation into Spring's portable Data Access Exception hierarchy.
-* Feature-rich Object Mapping integrated with Spring's Conversion Service.
-* Annotation-based mapping metadata that is extensible to support other metadata formats.
-* Automatic implementation of Repository interfaces, including support for custom query methods.
+include::r2dbc-databaseclient.adoc[leveloffset=+1]
-For most tasks, you should use `DatabaseClient` or the Repository support, which both leverage the rich mapping functionality.
-`DatabaseClient` is the place to look for accessing functionality such as ad-hoc CRUD operations.
+include::r2dbc-sql.adoc[leveloffset=+1]
-[[r2dbc.getting-started]]
-== Getting Started
+include::r2dbc-fluent.adoc[leveloffset=+1]
-An easy way to bootstrap setting up a working environment is to create a Spring-based project through https://start.spring.io[start.spring.io].
-
-. Add the following to the pom.xml files `dependencies` element:
-+
-[source,xml,subs="+attributes"]
-----
-
-
-
-
-
- org.springframework.data
- spring-data-r2dbc
- {version}
-
-
-
-
- io.r2dbc
- r2dbc-h2
- {r2dbcVersion}
-
-
-
-----
-. Change the version of Spring in the pom.xml to be
-+
-[source,xml,subs="+attributes"]
-----
-{springVersion}
-----
-. Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level of your ` ` element:
-+
-[source,xml]
-----
-
-
- spring-milestone
- Spring Maven MILESTONE Repository
- https://repo.spring.io/libs-milestone
-
-
-----
-
-The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here].
-
-You may also want to set the logging level to `DEBUG` to see some additional information. To do so, edit the `application.properties` file to have the following content:
-
-[source]
-----
-logging.level.org.springframework.data.r2dbc=DEBUG
-----
-
-Then you can create a `Person` class to persist:
-
-[source,java]
-----
-package org.spring.r2dbc.example;
-
-public class Person {
-
- private String id;
- private String name;
- private int age;
-
- public Person(String id, String name, int age) {
- this.id = id;
- this.name = name;
- this.age = age;
- }
-
- public String getId() {
- return id;
- }
- public String getName() {
- return name;
- }
- public int getAge() {
- return age;
- }
-
- @Override
- public String toString() {
- return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
- }
-}
-----
-
-Next, you need to create a table structure in your database:
-
-[source,sql]
-----
-CREATE TABLE person
- (id VARCHAR(255) PRIMARY KEY,
- name VARCHAR(255),
- age INT);
-----
-
-You also need a main application to run:
-
-[source,java]
-----
-package org.spring.r2dbc.example;
-
-public class R2dbcApp {
-
- private static final Log log = LogFactory.getLog(R2dbcApp.class);
-
- public static void main(String[] args) throws Exception {
-
- ConnectionFactory connectionFactory = new H2ConnectionFactory(H2ConnectionConfiguration.builder()
- .url("mem:test;DB_CLOSE_DELAY=10")
- .build());
-
- DatabaseClient client = DatabaseClient.create(connectionFactory);
-
- client.execute()
- .sql("CREATE TABLE person" +
- "(id VARCHAR(255) PRIMARY KEY," +
- "name VARCHAR(255)," +
- "age INT)")
- .fetch()
- .rowsUpdated()
- .as(StepVerifier::create)
- .expectNextCount(1)
- .verifyComplete();
-
- client.insert()
- .into(Person.class)
- .using(new Person("joe", "Joe", 34))
- .then()
- .as(StepVerifier::create)
- .verifyComplete();
-
- client.select()
- .from(Person.class)
- .fetch()
- .first()
- .doOnNext(it -> log.info(it))
- .as(StepVerifier::create)
- .expectNextCount(1)
- .verifyComplete();
- }
-}
-----
-
-When you run the main program, the preceding examples produce output similar to the following:
-
-[source]
-----
-2018-11-28 10:47:03,893 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person
- (id VARCHAR(255) PRIMARY KEY,
- name VARCHAR(255),
- age INT)]
-2018-11-28 10:47:04,074 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)]
-2018-11-28 10:47:04,092 DEBUG ata.r2dbc.function.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person]
-2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34]
-----
-
-Even in this simple example, there are few things to notice:
-
-* You can create an instance of the central helper class in Spring Data R2DBC, <>, by using a standard `io.r2dbc.spi.ConnectionFactory` object.
-* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.).
-* Mapping conventions can use field access. Notice that the `Person` class has only getters.
-* If the constructor argument names match the column names of the stored row, they are used to instantiate the object.
-
-[[r2dbc.examples-repo]]
-== Examples Repository
-
-There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works.
-
-[[r2dbc.drivers]]
-== Connecting to a Relational Database with Spring
-
-One of the first tasks when using relational databases and Spring is to create a `io.r2dbc.spi.ConnectionFactory` object using the IoC container. The following example explains Java-based configuration.
-
-[[r2dbc.connectionfactory]]
-=== Registering a `ConnectionFactory` Instance using Java-based Metadata
-
-The following example shows an example of using Java-based bean metadata to register an instance of a `io.r2dbc.spi.ConnectionFactory`:
-
-.Registering a `io.r2dbc.spi.ConnectionFactory` object using Java-based bean metadata
-====
-[source,java]
-----
-@Configuration
-public class ApplicationConfiguration extends AbstractR2dbcConfiguration {
-
- @Override
- @Bean
- public ConnectionFactory connectionFactory() {
- return …;
- }
-}
-----
-====
-
-This approach lets you use the standard `io.r2dbc.spi.ConnectionFactory` instance, with the container using Spring's `AbstractR2dbcConfiguration`. As compared to registering a `ConnectionFactory` instance directly, the configuration support has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates R2DBC exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and the use of `@Repository` is described in https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html[Spring's DAO support features].
-
-`AbstractR2dbcConfiguration` registers also `DatabaseClient` that is required for database interaction and for Repository implementation.
-
-[[r2dbc.drivers]]
-=== R2DBC Drivers
-
-Spring Data R2DBC supports drivers by R2DBC's pluggable SPI mechanism. Any driver implementing the R2DBC spec can be used with Spring Data R2DBC.
-R2DBC is a relatively young initiative that gains significance by maturing through adoption.
-As of writing the following 3 drivers are available:
-
-* https://github.com/r2dbc/r2dbc-postgresql[Postgres] (`io.r2dbc:r2dbc-postgresql`)
-* https://github.com/r2dbc/r2dbc-h2[H2] (`io.r2dbc:r2dbc-h2`)
-* https://github.com/r2dbc/r2dbc-mssql[Microsoft SQL Server] (`io.r2dbc:r2dbc-mssql`)
-
-Spring Data R2DBC reacts to database specifics by inspecting `ConnectionFactoryMetadata` and selects the appropriate database dialect.
-You can configure an own `Dialect` if the used driver is not yet known to Spring Data R2DBC.
-
-[[r2dbc.datbaseclient]]
-== Introduction to `DatabaseClient`
-
-Spring Data R2DBC includes a reactive, non-blocking `DatabaseClient` for database interaction. The client has a functional, fluent API with reactive types for declarative composition.
-`DatabaseClient` encapsulates resource handling such as opening and closing connections so your application code can make use of executing SQL queries or calling higher-level functionality such as inserting or selecting data.
-
-NOTE: `DatabaseClient` is a young application component providing a minimal set of convenience methods that is likely to be extended through time.
-
-NOTE: Once configured, `DatabaseClient` is thread-safe and can be reused across multiple instances.
-
-Another central feature of `DatabaseClient` is translation of exceptions thrown by R2DBC drivers into Spring's portable Data Access Exception hierarchy. See "`<>`" for more information.
-
-The next section contains an example of how to work with the `DatabaseClient` in the context of the Spring container.
-
-[[r2dbc.datbaseclient.create]]
-=== Creating `DatabaseClient`
-
-The simplest way to create a `DatabaseClient` is through a static factory method:
-
-[source,java]
-----
-DatabaseClient.create(ConnectionFactory connectionFactory)
-----
-
-The above method creates a `DatabaseClient` with default settings.
-
-You can also use `DatabaseClient.builder()` with further options to customize the client:
-
-* `exceptionTranslator`: Supply a specific `R2dbcExceptionTranslator` to customize how R2DBC exceptions are translated into Spring's portable Data Access Exception hierarchy. See "`<>`" for more information.
-* `dataAccessStrategy`: Strategy how SQL queries are generated and how objects are mapped.
-
-Once built, a `DatabaseClient` instance is immutable. However, you can clone it and build a modified copy without affecting the original instance, as the following example shows:
-
-[source,java]
-----
-DatabaseClient client1 = DatabaseClient.builder()
- .exceptionTranslator(exceptionTranslatorA).build();
-
-DatabaseClient client2 = client1.mutate()
- .exceptionTranslator(exceptionTranslatorB).build();
-----
-
-=== Controlling Database Connections
-
-Spring Data R2DBC obtains a connection to the database through a `ConnectionFactory`.
-A `ConnectionFactory` is part of the R2DBC specification and is a generalized connection factory.
-It lets a container or a framework hide connection pooling and transaction management issues from the application code.
-
-When you use Spring Data R2DBC, you can create a `ConnectionFactory` using your R2DBC driver.
-`ConnectionFactory` implementations can either return the same connection, different connections or provide connection pooling.
-`DatabaseClient` uses `ConnectionFactory` to create and release connections per operation without affinity to a particular connection across multiple operations.
-
-[[r2dbc.exception]]
-== Exception Translation
-
-The Spring framework provides exception translation for a wide variety of database and mapping technologies.
-This has traditionally been for JDBC and JPA. The Spring support for R2DBC extends this feature by providing implementations of the `R2dbcExceptionTranslator` interface.
-
-`R2dbcExceptionTranslator` is an interface to be implemented by classes that can translate between `R2dbcException` and Spring’s own `org.springframework.dao.DataAccessException`, which is agnostic in regard to data access strategy.
-Implementations can be generic (for example, using SQLState codes) or proprietary (for example, using Postgres error codes) for greater precision.
-
-`SqlErrorCodeR2dbcExceptionTranslator` is the implementation of `R2dbcExceptionTranslator` that is used by default.
-This implementation uses specific vendor codes.
-It is more precise than the SQLState implementation.
-The error code translations are based on codes held in a JavaBean type class called `SQLErrorCodes`.
-This class is created and populated by an `SQLErrorCodesFactory`, which (as the name suggests) is a factory for creating SQLErrorCodes based on the contents of a configuration file named `sql-error-codes.xml` from Spring's Data Access module.
-This file is populated with vendor codes and based on the `ConnectionFactoryName` taken from `ConnectionFactoryMetadata`.
-The codes for the actual database you are using are used.
-
-The `SqlErrorCodeR2dbcExceptionTranslator` applies matching rules in the following sequence:
-
-1. Any custom translation implemented by a subclass. Normally, the provided concrete `SqlErrorCodeR2dbcExceptionTranslator` is used, so this rule does not apply. It applies only if you have actually provided a subclass implementation.
-2. Any custom implementation of the `SQLExceptionTranslator` interface that is provided as the `customSqlExceptionTranslator` property of the `SQLErrorCodes` class.
-3. Error code matching is applied.
-4. Use a fallback translator.
-
-
-NOTE: The `SQLErrorCodesFactory` is used by default to define Error codes and custom exception translations. They are looked up in a file named `sql-error-codes.xml` from the classpath, and the matching `SQLErrorCodes` instance is located based on the database name from the database metadata of the database in use. `SQLErrorCodesFactory` is as of now part of Spring JDBC. Spring Data R2DBC reuses existing translation configurations.
-
-You can extend `SqlErrorCodeR2dbcExceptionTranslator`, as the following example shows:
-
-[source,java]
-----
-public class CustomSqlErrorCodeR2dbcExceptionTranslator extends SqlErrorCodeR2dbcExceptionTranslator {
-
- protected DataAccessException customTranslate(String task, String sql, R2dbcException r2dbcex) {
- if (sqlex.getErrorCode() == -12345) {
- return new DeadlockLoserDataAccessException(task, r2dbcex);
- }
- return null;
- }
-}
-----
-
-In the preceding example, the specific error code (`-12345`) is translated, while other errors are left to be translated by the default translator implementation.
-To use this custom translator, you must configure `DatabaseClient` through the builder method `exceptionTranslator`, and you must use this `DatabaseClient` for all of the data access processing where this translator is needed.
-The following example shows how you can use this custom translator:
-
-[source,java]
-----
-ConnectionFactory connectionFactory = …;
-
-CustomSqlErrorCodeR2dbcExceptionTranslator exceptionTranslator = new CustomSqlErrorCodeR2dbcExceptionTranslator();
-
-DatabaseClient client = DatabaseClient.builder()
- .connectionFactory(connectionFactory)
- .exceptionTranslator(exceptionTranslator)
- .build();
-----
-
-[[r2dbc.datbaseclient.statements]]
-=== Running Statements
-
-Running a statement is the basic functionality that is covered by `DatabaseClient`.
-The following example shows what you need to include for a minimal but fully functional class that creates a new table:
-
-[source,java]
-----
-Mono completion = client.execute()
- .sql("CREATE TABLE person (id VARCHAR(255) PRIMARY KEY, name VARCHAR(255), age INTEGER);")
- .then();
-----
-
-`DatabaseClient` is designed for a convenient fluent usage.
-It exposes intermediate, continuation, and terminal methods at each stage of the execution specification.
-The example above uses `then()` to return a completion `Publisher` that completes as soon as the query (or queries, if the SQL query contains multiple statements) completes.
-
-NOTE: `execute().sql(…)` accepts either the SQL query string or a query `Supplier` to defer the actual query creation until execution.
-
-[[r2dbc.datbaseclient.queries]]
-=== Running Queries
-
-SQL queries can return values or the number of affected rows.
-`DatabaseClient` can return the number of updated rows or the rows themselves, depending on the issued query.
-
-The following example shows an `UPDATE` statement that returns the number of updated rows:
-
-[source,java]
-----
-Mono affectedRows = client.execute()
- .sql("UPDATE person SET name = 'Joe'")
- .fetch().rowsUpdated();
-----
-
-Running a `SELECT` query returns a different type of result, in particular tabular results. Tabular data is typically consumes by streaming each `Row`.
-You might have noticed the use of `fetch()` in the previous example.
-`fetch()` is a continuation operator that allows you to specify how much data you want to consume.
-
-[source,java]
-----
-Mono> first = client.execute()
- .sql("SELECT id, name FROM person")
- .fetch().first();
-----
-
-Calling `first()` returns the first row from the result and discards remaining rows.
-You can consume data with the following operators:
-
-* `first()` return the first row of the entire result
-* `one()` returns exactly one result and fails if the result contains more rows.
-* `all()` returns all rows of the result
-* `rowsUpdated()` returns the number of affected rows (`INSERT` count, `UPDATE` count)
-
-`DatabaseClient` queries return their results by default as `Map` of column name to value. You can customize type mapping by applying an `as(Class)` operator.
-
-[source,java]
-----
-Flux all = client.execute()
- .sql("SELECT id, name FROM mytable")
- .as(Person.class)
- .fetch().all();
-----
-
-`as(…)` applies <> and maps the resulting columns to your POJO.
-
-[[r2dbc.datbaseclient.mapping]]
-=== Mapping Results
-
-You can customize result extraction beyond `Map` and POJO result extraction by providing an extractor `BiFunction`.
-The extractor function interacts directly with R2DBC's `Row` and `RowMetadata` objects and can return arbitrary values (singular values, collections/maps, objects).
-
-The following example extracts the `id` column and emits its value:
-
-[source,java]
-----
-Flux names= client.execute()
- .sql("SELECT name FROM person")
- .map((row, rowMetadata) -> row.get("id", String.class))
- .all();
-----
-
-[[r2dbc.datbaseclient.mapping.null]]
-.What about `null`?
-****
-Relational database results may contain `null` values.
-Reactive Streams forbids emission of `null` values which requires a proper `null` handling in the extractor function.
-While you can obtain `null` values from a `Row`, you must not emit a `null` value.
-You must wrap any `null` values in an object (e.g. `Optional` for singular values) to make sure a `null` value is never returned directly by your extractor function.
-****
-
-[[r2dbc.datbaseclient.binding]]
-=== Binding Values to Queries
-
-A typical application requires parameterized SQL statements to select or update rows according to some input.
-These are typically `SELECT` statements constrained by a `WHERE` clause or `INSERT`/`UPDATE` statements accepting input parameters.
-Parameterized statements bear the risk of SQL injection if parameters are not escaped properly.
-`DatabaseClient` leverages R2DBC's Bind API to eliminate the risk of SQL injection for query parameters.
-You can provide a parameterized SQL statement with the `sql(…)` operator and bind parameters to the actual `Statement`.
-Your R2DBC driver then executes the statement using prepared statements and parameter substitution.
-
-Parameter binding supports various binding strategies:
-
-* By Index using zero-based parameter indexes.
-* By Name using the placeholder name.
-
-The following example shows parameter binding for a query:
-
-[source,java]
-----
-db.execute()
- .sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
- .bind("id", "joe")
- .bind("name", "Joe")
- .bind("age", 34);
-----
-
-.R2DBC Native Bind Markers
-****
-R2DBC uses database-native bind markers that depend on the actual database vendor.
-As an example, Postgres uses indexed markers such as `$1`, `$2`, `$n`.
-Another example is SQL Server that uses named bind markers prefixed with `@` (at).
-
-This is different from JDBC which requires `?` (question mark) as bind markers.
-In JDBC, the actual drivers translate question mark bind markers to database-native markers as part of their statement execution.
-
-Spring Data R2DBC allows you to use native bind markers or named bind markers with the `:name` syntax.
-
-Named parameter support leverages ``Dialect``s to expand named parameters to native bind markers at the time of query execution which gives you a certain degree of query portability across various database vendors.
-****
-
-The query-preprocessor unrolls named `Collection` parameters into a series of bind markers to remove the need of dynamic query creation based on the number of arguments.
-Nested object arrays are expanded to allow usage of e.g. select lists.
-
-Consider the following query:
-
-[source,sql]
-----
-SELECT id, name, state FROM table WHERE (name, age) IN (('John', 35), ('Ann', 50))
-----
-
-This query can be parametrized and executed as:
-
-[source,java]
-----
-List tuples = new ArrayList<>();
-tuples.add(new Object[] {"John", 35});
-tuples.add(new Object[] {"Ann", 50});
-
-db.execute()
- .sql("SELECT id, name, state FROM table WHERE (name, age) IN (:tuples)")
- .bind("tuples", tuples);
-----
-
-NOTE: Usage of select lists is vendor-dependent.
-
-A simpler variant using `IN` predicates:
-
-[source,java]
-----
-db.execute()
- .sql("SELECT id, name, state FROM table WHERE age IN (:ages)")
- .bind("ages", Arrays.asList(35, 50));
-----
-
-[[r2dbc.datbaseclient.transactions]]
-=== Transactions
-
-A common pattern when using relational databases is grouping multiple queries within a unit of work that is guarded by a transaction.
-Relational databases typically associate a transaction with a single transport connection.
-Using different connections hence results in utilizing different transactions.
-Spring Data R2DBC includes a transactional `DatabaseClient` implementation with `TransactionalDatabaseClient` that allows you to group multiple statements within the same transaction.
-`TransactionalDatabaseClient` is a extension of `DatabaseClient` that exposes the same functionality as `DatabaseClient` and adds transaction-management methods.
-
-You can run multiple statements within a transaction using the `inTransaction(Function)` closure:
-
-[source,java]
-----
-TransactionalDatabaseClient databaseClient = TransactionalDatabaseClient.create(connectionFactory);
-
-Flux completion = databaseClient.inTransaction(db -> {
-
- return db.execute().sql("INSERT INTO person (id, name, age) VALUES(:id, :name, :age)")
- .bind("id", "joe")
- .bind("name", "Joe")
- .bind("age", 34)
- .fetch().rowsUpdated()
- .then(db.execute().sql("INSERT INTO contacts (id, name) VALUES(:id, :name)")
- .bind("id", "joe")
- .bind("name", "Joe")
- .fetch().rowsUpdated())
- .then();
-});
-----
+include::r2dbc-transactions.adoc[leveloffset=+1]
diff --git a/src/main/java/org/springframework/data/r2dbc/dialect/Bindings.java b/src/main/java/org/springframework/data/r2dbc/dialect/Bindings.java
new file mode 100644
index 00000000..94fb90e0
--- /dev/null
+++ b/src/main/java/org/springframework/data/r2dbc/dialect/Bindings.java
@@ -0,0 +1,285 @@
+/*
+ * Copyright 2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.r2dbc.dialect;
+
+import io.r2dbc.spi.Statement;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Spliterator;
+import java.util.function.Consumer;
+
+import org.springframework.data.r2dbc.domain.BindTarget;
+import org.springframework.data.util.Streamable;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+
+/**
+ * Value object representing value and {@code null} bindings for a {@link Statement} using {@link BindMarkers}. Bindings
+ * are typically immutable.
+ *
+ * @author Mark Paluch
+ */
+public class Bindings implements Streamable<Bindings.Binding> {
+
+ private static final Bindings EMPTY = new Bindings();
+
+ private final Map<BindMarker, Binding> bindings;
+
+ /**
+ * Create empty {@link Bindings}.
+ */
+ public Bindings() {
+ this.bindings = Collections.emptyMap();
+ }
+
+ /**
+ * Create {@link Bindings} from a {@link Collection} of {@link Binding}s.
+ *
+ * @param bindings must not be {@literal null}.
+ */
+ public Bindings(Collection<Binding> bindings) {
+
+ Assert.notNull(bindings, "Bindings must not be null");
+
+ Map<BindMarker, Binding> mapping = new LinkedHashMap<>(bindings.size());
+ bindings.forEach(it -> mapping.put(it.getBindMarker(), it));
+ this.bindings = mapping;
+ }
+
+ Bindings(Map<BindMarker, Binding> bindings) {
+ this.bindings = bindings;
+ }
+
+ /**
+ * Create a new, empty {@link Bindings} object.
+ *
+ * @return a new, empty {@link Bindings} object.
+ */
+ public static Bindings empty() {
+ return EMPTY;
+ }
+
+ protected Map<BindMarker, Binding> getBindings() {
+ return this.bindings;
+ }
+
+ /**
+ * Merge two {@link Bindings} objects and create a new merged {@link Bindings} object.
+ *
+ * @param left the left object to merge with.
+ * @param right the right object to merge with.
+ * @return a new, merged {@link Bindings} object.
+ */
+ public static Bindings merge(Bindings left, Bindings right) {
+
+ Assert.notNull(left, "Left side Bindings must not be null");
+ Assert.notNull(right, "Right side Bindings must not be null");
+
+ List<Binding> result = new ArrayList<>(left.getBindings().size() + right.getBindings().size());
+
+ result.addAll(left.getBindings().values());
+ result.addAll(right.getBindings().values());
+
+ return new Bindings(result);
+ }
+
+ /**
+ * Merge this {@link Bindings} object with another {@link Bindings} object and create a new merged {@link Bindings} object.
+ *
+ * @param other the object to merge with.
+ * @return a new, merged {@link Bindings} object.
+ */
+ public Bindings and(Bindings other) {
+ return merge(this, other);
+ }
+
+ /**
+ * Apply the bindings to a {@link BindTarget}.
+ *
+ * @param bindTarget the target to apply bindings to.
+ */
+ public void apply(BindTarget bindTarget) {
+
+ Assert.notNull(bindTarget, "BindTarget must not be null");
+ this.bindings.forEach((marker, binding) -> binding.apply(bindTarget));
+ }
+
+ /**
+ * Performs the given action for each binding of this {@link Bindings} until all bindings have been processed or the
+ * action throws an exception. Actions are performed in the order of iteration (if an iteration order is specified).
+ * Exceptions thrown by the action are relayed to the caller.
+ *
+ * @param action The action to be performed for each {@link Binding}.
+ */
+ public void forEach(Consumer<? super Binding> action) {
+ this.bindings.forEach((marker, binding) -> action.accept(binding));
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see java.lang.Iterable#iterator()
+ */
+ @Override
+ public Iterator<Binding> iterator() {
+ return this.bindings.values().iterator();
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see java.lang.Iterable#spliterator()
+ */
+ @Override
+ public Spliterator<Binding> spliterator() {
+ return this.bindings.values().spliterator();
+ }
+
+ /**
+ * Base class for value objects representing a value or a {@code NULL} binding.
+ */
+ public abstract static class Binding {
+
+ private final BindMarker marker;
+
+ protected Binding(BindMarker marker) {
+ this.marker = marker;
+ }
+
+ /**
+ * @return the associated {@link BindMarker}.
+ */
+ public BindMarker getBindMarker() {
+ return this.marker;
+ }
+
+ /**
+ * Return {@literal true} if there is a value present, otherwise {@literal false} for a {@code NULL} binding.
+ *
+ * @return {@literal true} if there is a value present, otherwise {@literal false} for a {@code NULL} binding.
+ */
+ public abstract boolean hasValue();
+
+ /**
+ * Return {@literal true} if this is a {@code NULL} binding.
+ *
+ * @return {@literal true} if this is a {@code NULL} binding.
+ */
+ public boolean isNull() {
+ return !hasValue();
+ }
+
+ /**
+ * Returns the value of this binding. Can be {@literal null} if this is a {@code NULL} binding.
+ *
+ * @return value of this binding. Can be {@literal null} if this is a {@code NULL} binding.
+ */
+ @Nullable
+ public abstract Object getValue();
+
+ /**
+ * Applies the binding to a {@link BindTarget}.
+ *
+ * @param bindTarget the target to apply bindings to.
+ */
+ public abstract void apply(BindTarget bindTarget);
+ }
+
+ /**
+ * Value binding.
+ */
+ public static class ValueBinding extends Binding {
+
+ private final Object value;
+
+ public ValueBinding(BindMarker marker, Object value) {
+ super(marker);
+ this.value = value;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#hasValue()
+ */
+ public boolean hasValue() {
+ return true;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#getValue()
+ */
+ public Object getValue() {
+ return this.value;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#apply(io.r2dbc.spi.Statement)
+ */
+ @Override
+ public void apply(BindTarget bindTarget) {
+ getBindMarker().bind(bindTarget, getValue());
+ }
+ }
+
+ /**
+ * {@code NULL} binding.
+ */
+ public static class NullBinding extends Binding {
+
+ private final Class<?> valueType;
+
+ public NullBinding(BindMarker marker, Class<?> valueType) {
+ super(marker);
+ this.valueType = valueType;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#hasValue()
+ */
+ public boolean hasValue() {
+ return false;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#getValue()
+ */
+ @Nullable
+ public Object getValue() {
+ return null;
+ }
+
+ public Class<?> getValueType() {
+ return this.valueType;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.query.Bindings.Binding#apply(BindTarget)
+ */
+ @Override
+ public void apply(BindTarget bindTarget) {
+ getBindMarker().bindNull(bindTarget, getValueType());
+ }
+ }
+}
diff --git a/src/main/java/org/springframework/data/r2dbc/dialect/MutableBindings.java b/src/main/java/org/springframework/data/r2dbc/dialect/MutableBindings.java
new file mode 100644
index 00000000..dd2abda5
--- /dev/null
+++ b/src/main/java/org/springframework/data/r2dbc/dialect/MutableBindings.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.r2dbc.dialect;
+
+import io.r2dbc.spi.Statement;
+
+import java.util.LinkedHashMap;
+
+import org.springframework.util.Assert;
+
+/**
+ * Mutable extension to {@link Bindings} for Value and {@code null} bindings for a {@link Statement} using
+ * {@link BindMarkers}.
+ *
+ * @author Mark Paluch
+ */
+public class MutableBindings extends Bindings {
+
+ private final BindMarkers markers;
+
+ /**
+ * Create new {@link MutableBindings}.
+ *
+ * @param markers must not be {@literal null}.
+ */
+ public MutableBindings(BindMarkers markers) {
+
+ super(new LinkedHashMap<>());
+
+ Assert.notNull(markers, "BindMarkers must not be null");
+
+ this.markers = markers;
+ }
+
+ /**
+ * Obtain the next {@link BindMarker}. Increments {@link BindMarkers} state.
+ *
+ * @return the next {@link BindMarker}.
+ */
+ public BindMarker nextMarker() {
+ return markers.next();
+ }
+
+ /**
+ * Obtain the next {@link BindMarker} with a name {@code hint}. Increments {@link BindMarkers} state.
+ *
+ * @param hint name hint.
+ * @return the next {@link BindMarker}.
+ */
+ public BindMarker nextMarker(String hint) {
+ return markers.next(hint);
+ }
+
+ /**
+ * Bind a value to {@link BindMarker}.
+ *
+ * @param marker must not be {@literal null}.
+ * @param value must not be {@literal null}.
+ * @return {@code this} {@link MutableBindings}.
+ */
+ public MutableBindings bind(BindMarker marker, Object value) {
+
+ Assert.notNull(marker, "BindMarker must not be null");
+ Assert.notNull(value, "Value must not be null");
+
+ getBindings().put(marker, new ValueBinding(marker, value));
+
+ return this;
+ }
+
+ /**
+ * Bind a value and return the related {@link BindMarker}. Increments {@link BindMarkers} state.
+ *
+ * @param value must not be {@literal null}.
+ * @return the {@link BindMarker} associated with the bound value.
+ */
+ public BindMarker bind(Object value) {
+
+ Assert.notNull(value, "Value must not be null");
+
+ BindMarker marker = nextMarker();
+ getBindings().put(marker, new ValueBinding(marker, value));
+
+ return marker;
+ }
+
+ /**
+ * Bind a {@code NULL} value to {@link BindMarker}.
+ *
+ * @param marker must not be {@literal null}.
+ * @param valueType must not be {@literal null}.
+ * @return {@code this} {@link MutableBindings}.
+ */
+ public MutableBindings bindNull(BindMarker marker, Class<?> valueType) {
+
+ Assert.notNull(marker, "BindMarker must not be null");
+ Assert.notNull(valueType, "Value type must not be null");
+
+ getBindings().put(marker, new NullBinding(marker, valueType));
+
+ return this;
+ }
+
+ /**
+ * Bind a {@code NULL} value and return the related {@link BindMarker}. Increments {@link BindMarkers} state.
+ *
+ * @param valueType must not be {@literal null}.
+ * @return the {@link BindMarker} associated with the {@code NULL} binding.
+ */
+ public BindMarker bindNull(Class<?> valueType) {
+
+ Assert.notNull(valueType, "Value type must not be null");
+
+ BindMarker marker = nextMarker();
+ getBindings().put(marker, new NullBinding(marker, valueType));
+
+ return marker;
+ }
+}
diff --git a/src/main/java/org/springframework/data/r2dbc/function/BindableOperation.java b/src/main/java/org/springframework/data/r2dbc/domain/BindableOperation.java
similarity index 77%
rename from src/main/java/org/springframework/data/r2dbc/function/BindableOperation.java
rename to src/main/java/org/springframework/data/r2dbc/domain/BindableOperation.java
index d37c44f5..22f1baa3 100644
--- a/src/main/java/org/springframework/data/r2dbc/function/BindableOperation.java
+++ b/src/main/java/org/springframework/data/r2dbc/domain/BindableOperation.java
@@ -1,4 +1,19 @@
-package org.springframework.data.r2dbc.function;
+/*
+ * Copyright 2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.r2dbc.domain;
import io.r2dbc.spi.Statement;
diff --git a/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java b/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java
index da2cf453..39f7c8bf 100644
--- a/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java
+++ b/src/main/java/org/springframework/data/r2dbc/function/DatabaseClient.java
@@ -30,6 +30,9 @@
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.r2dbc.domain.PreparedOperation;
+import org.springframework.data.r2dbc.domain.SettableValue;
+import org.springframework.data.r2dbc.function.query.Criteria;
+import org.springframework.data.r2dbc.function.query.Update;
import org.springframework.data.r2dbc.support.R2dbcExceptionTranslator;
/**
@@ -58,6 +61,16 @@ public interface DatabaseClient {
*/
InsertIntoSpec insert();
+ /**
+ * Prepare an SQL UPDATE call.
+ */
+ UpdateTableSpec update();
+
+ /**
+ * Prepare an SQL DELETE call.
+ */
+ DeleteFromSpec delete();
+
/**
* Return a builder to mutate properties of this database client.
*/
@@ -262,7 +275,7 @@ interface SelectFromSpec {
}
/**
- * Contract for specifying {@code SELECT} options leading to the exchange.
+ * Contract for specifying {@code INSERT} options leading to the exchange.
*/
interface InsertIntoSpec {
@@ -284,6 +297,52 @@ interface InsertIntoSpec {
 <T> TypedInsertSpec<T> into(Class<T> table);
}
+ /**
+ * Contract for specifying {@code UPDATE} options leading to the exchange.
+ */
+ interface UpdateTableSpec {
+
+ /**
+ * Specify the target {@literal table} to update.
+ *
+ * @param table must not be {@literal null} or empty.
+ * @return a {@link GenericUpdateSpec} for further configuration of the update. Guaranteed to be not
+ * {@literal null}.
+ */
+ GenericUpdateSpec table(String table);
+
+ /**
+ * Specify the target table to update using the {@link Class entity class}.
+ *
+ * @param table must not be {@literal null}.
+ * @return a {@link TypedUpdateSpec} for further configuration of the update. Guaranteed to be not {@literal null}.
+ */
+ <T> TypedUpdateSpec<T> table(Class<T> table);
+ }
+
+ /**
+ * Contract for specifying {@code DELETE} options leading to the exchange.
+ */
+ interface DeleteFromSpec {
+
+ /**
+ * Specify the source {@literal table} to delete from.
+ *
+ * @param table must not be {@literal null} or empty.
+ * @return a {@link DeleteMatchingSpec} for further configuration of the delete. Guaranteed to be not
+ * {@literal null}.
+ */
+ DeleteMatchingSpec from(String table);
+
+ /**
+ * Specify the source table to delete from using the {@link Class entity class}.
+ *
+ * @param table must not be {@literal null}.
+ * @return a {@link TypedDeleteSpec} for further configuration of the delete. Guaranteed to be not {@literal null}.
+ */
+ <T> TypedDeleteSpec<T> from(Class<T> table);
+ }
+
/**
* Contract for specifying {@code SELECT} options leading to the exchange.
*/
@@ -354,6 +413,13 @@ interface SelectSpec> {
*/
S project(String... selectedFields);
+ /**
+ * Configure a filter {@link Criteria}.
+ *
+ * @param criteria must not be {@literal null}.
+ */
+ S matching(Criteria criteria);
+
/**
* Configure {@link Sort}.
*
@@ -361,12 +427,21 @@ interface SelectSpec> {
*/
S orderBy(Sort sort);
+ /**
+ * Configure {@link Sort}.
+ *
+ * @param orders must not be {@literal null}.
+ */
+ default S orderBy(Sort.Order... orders) {
+ return orderBy(Sort.by(orders));
+ }
+
/**
* Configure pagination. Overrides {@link Sort} if the {@link Pageable} contains a {@link Sort} object.
*
- * @param page must not be {@literal null}.
+ * @param pageable must not be {@literal null}.
*/
- S page(Pageable page);
+ S page(Pageable pageable);
}
/**
@@ -389,12 +464,23 @@ interface GenericInsertSpec extends InsertSpec {
*
* @param field must not be {@literal null} or empty.
* @param type must not be {@literal null}.
+ * @deprecated will be removed soon. Use {@link #nullValue(String)}.
*/
- GenericInsertSpec nullValue(String field, Class> type);
+ @Deprecated
+ default GenericInsertSpec<T> nullValue(String field, Class<?> type) {
+ return value(field, SettableValue.empty(type));
+ }
+
+ /**
+ * Specify a {@literal null} value to insert.
+ *
+ * @param field must not be {@literal null} or empty.
+ */
+ GenericInsertSpec<T> nullValue(String field);
}
/**
- * Contract for specifying {@code SELECT} options leading the exchange.
+ * Contract for specifying {@code INSERT} options leading the exchange.
*/
interface TypedInsertSpec {
@@ -437,7 +523,7 @@ interface InsertSpec {
/**
* Configure a result mapping {@link java.util.function.BiFunction function}.
*
- * @param mappwingFunction must not be {@literal null}.
+ * @param mappingFunction must not be {@literal null}.
+ * @param <R> result type.
* @return a {@link FetchSpec} for configuration what to fetch. Guaranteed to be not {@literal null}.
*/
@@ -456,6 +542,125 @@ interface InsertSpec {
Mono then();
}
+ /**
+ * Contract for specifying {@code UPDATE} options leading to the exchange.
+ */
+ interface GenericUpdateSpec {
+
+ /**
+ * Specify an {@link Update} object containing assignments.
+ *
+ * @param update must not be {@literal null}.
+ */
+ UpdateMatchingSpec using(Update update);
+ }
+
+ /**
+ * Contract for specifying {@code UPDATE} options leading to the exchange.
+ */
+ interface TypedUpdateSpec {
+
+ /**
+ * Update the given {@code objectToUpdate}.
+ *
+ * @param objectToUpdate the object of which the attributes will provide the values for the update and the primary
+ * key. Must not be {@literal null}.
+ * @return a {@link UpdateSpec} for further configuration of the update. Guaranteed to be not {@literal null}.
+ */
+ UpdateSpec using(T objectToUpdate);
+
+ /**
+ * Use the given {@code tableName} as update target.
+ *
+ * @param tableName must not be {@literal null} or empty.
+ * @return a {@link TypedUpdateSpec} for further configuration of the update. Guaranteed to be not {@literal null}.
+ */
+ TypedUpdateSpec table(String tableName);
+ }
+
+ /**
+ * Contract for specifying {@code UPDATE} options leading to the exchange.
+ */
+ interface UpdateMatchingSpec extends UpdateSpec {
+
+ /**
+ * Configure a filter {@link Criteria}.
+ *
+ * @param criteria must not be {@literal null}.
+ */
+ UpdateSpec matching(Criteria criteria);
+ }
+
+ /**
+ * Contract for specifying {@code UPDATE} options leading to the exchange.
+ */
+ interface UpdateSpec {
+
+ /**
+ * Perform the SQL call and retrieve the result.
+ */
+ UpdatedRowsFetchSpec fetch();
+
+ /**
+ * Perform the SQL call and return a {@link Mono} that completes without result on statement completion.
+ *
+ * @return a {@link Mono} ignoring its payload (actively dropping).
+ */
+ Mono<Void> then();
+ }
+
+ /**
+ * Contract for specifying {@code DELETE} options leading to the exchange.
+ */
+ interface TypedDeleteSpec extends DeleteSpec {
+
+ /**
+ * Use the given {@code tableName} as delete target.
+ *
+ * @param tableName must not be {@literal null} or empty.
+ * @return a {@link TypedDeleteSpec} for further configuration of the delete. Guaranteed to be not {@literal null}.
+ */
+ TypedDeleteSpec table(String tableName);
+
+ /**
+ * Configure a filter {@link Criteria}.
+ *
+ * @param criteria must not be {@literal null}.
+ */
+ DeleteSpec matching(Criteria criteria);
+ }
+
+ /**
+ * Contract for specifying {@code DELETE} options leading to the exchange.
+ */
+ interface DeleteMatchingSpec extends DeleteSpec {
+
+ /**
+ * Configure a filter {@link Criteria}.
+ *
+ * @param criteria must not be {@literal null}.
+ */
+ DeleteSpec matching(Criteria criteria);
+ }
+
+ /**
+ * Contract for specifying {@code DELETE} options leading to the exchange.
+ */
+ interface DeleteSpec {
+
+ /**
+ * Perform the SQL call and retrieve the result.
+ */
+ UpdatedRowsFetchSpec fetch();
+
+ /**
+ * Perform the SQL call and return a {@link Mono} that completes without result on statement completion.
+ *
+ * @return a {@link Mono} ignoring its payload (actively dropping).
+ */
+ Mono<Void> then();
+ }
+
/**
* Contract for specifying parameter bindings.
*/
diff --git a/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java b/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java
index ebf2107f..4db374c0 100644
--- a/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java
+++ b/src/main/java/org/springframework/data/r2dbc/function/DefaultDatabaseClient.java
@@ -34,7 +34,6 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -53,13 +52,15 @@
import org.springframework.data.domain.Sort;
import org.springframework.data.r2dbc.UncategorizedR2dbcException;
import org.springframework.data.r2dbc.domain.BindTarget;
+import org.springframework.data.r2dbc.domain.BindableOperation;
import org.springframework.data.r2dbc.domain.OutboundRow;
import org.springframework.data.r2dbc.domain.PreparedOperation;
import org.springframework.data.r2dbc.domain.SettableValue;
import org.springframework.data.r2dbc.function.connectionfactory.ConnectionProxy;
import org.springframework.data.r2dbc.function.convert.ColumnMapRowMapper;
+import org.springframework.data.r2dbc.function.query.Criteria;
+import org.springframework.data.r2dbc.function.query.Update;
import org.springframework.data.r2dbc.support.R2dbcExceptionTranslator;
-import org.springframework.data.relational.core.sql.Insert;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
@@ -96,7 +97,7 @@ class DefaultDatabaseClient implements DatabaseClient, ConnectionAccessor {
@Override
public Builder mutate() {
- return builder;
+ return this.builder;
}
@Override
@@ -114,6 +115,16 @@ public InsertIntoSpec insert() {
return new DefaultInsertIntoSpec();
}
+ @Override
+ public UpdateTableSpec update() {
+ return new DefaultUpdateTableSpec();
+ }
+
+ @Override
+ public DeleteFromSpec delete() {
+ return new DefaultDeleteFromSpec();
+ }
+
/**
* Execute a callback {@link Function} within a {@link Connection} scope. The function is responsible for creating a
* {@link Mono}. The connection is released after the {@link Mono} terminates (or the subscription is cancelled).
@@ -196,7 +207,7 @@ protected Publisher closeConnection(Connection connection) {
* @throws IllegalStateException in case of no DataSource set
*/
protected ConnectionFactory obtainConnectionFactory() {
- return connector;
+ return this.connector;
}
/**
@@ -220,7 +231,7 @@ protected Connection createConnectionProxy(Connection con) {
*/
protected DataAccessException translateException(String task, @Nullable String sql, R2dbcException ex) {
- DataAccessException dae = exceptionTranslator.translate(task, sql, ex);
+ DataAccessException dae = this.exceptionTranslator.translate(task, sql, ex);
return (dae != null ? dae : new UncategorizedR2dbcException(task, sql, ex));
}
@@ -345,9 +356,9 @@ FetchSpec exchange(Supplier sqlSupplier, BiFunction FetchSpec exchange(Supplier sqlSupplier, BiFunction {
+ this.byName.forEach((name, o) -> {
if (o.getValue() != null) {
operation.bind(bindTarget, name, o.getValue());
@@ -364,7 +375,7 @@ FetchSpec exchange(Supplier sqlSupplier, BiFunction FetchSpec map(BiFunction mappingFunction) {
@Override
public FetchSpec fetch() {
- return exchange(this.sqlSupplier, mappingFunction);
+ return exchange(this.sqlSupplier, this.mappingFunction);
}
@Override
@@ -596,7 +607,7 @@ public DefaultTypedExecuteSpec bind(Object bean) {
@Override
protected DefaultTypedExecuteSpec createInstance(Map byIndex,
Map byName, Supplier sqlSupplier) {
- return createTypedExecuteSpec(byIndex, byName, sqlSupplier, typeToRead);
+ return createTypedExecuteSpec(byIndex, byName, sqlSupplier, this.typeToRead);
}
}
@@ -624,6 +635,7 @@ private abstract class DefaultSelectSpecSupport {
final String table;
final List projectedFields;
+ final @Nullable Criteria criteria;
final Sort sort;
final Pageable page;
@@ -633,6 +645,7 @@ private abstract class DefaultSelectSpecSupport {
this.table = table;
this.projectedFields = Collections.emptyList();
+ this.criteria = null;
this.sort = Sort.unsorted();
this.page = Pageable.unpaged();
}
@@ -644,34 +657,34 @@ public DefaultSelectSpecSupport project(String... selectedFields) {
projectedFields.addAll(this.projectedFields);
projectedFields.addAll(Arrays.asList(selectedFields));
- return createInstance(table, projectedFields, sort, page);
+ return createInstance(this.table, projectedFields, this.criteria, this.sort, this.page);
+ }
+
+ public DefaultSelectSpecSupport where(Criteria whereCriteria) {
+
+ Assert.notNull(whereCriteria, "Criteria must not be null!");
+
+ return createInstance(this.table, this.projectedFields, whereCriteria, this.sort, this.page);
}
public DefaultSelectSpecSupport orderBy(Sort sort) {
Assert.notNull(sort, "Sort must not be null!");
- return createInstance(table, projectedFields, sort, page);
+ return createInstance(this.table, this.projectedFields, this.criteria, sort, this.page);
}
public DefaultSelectSpecSupport page(Pageable page) {
Assert.notNull(page, "Pageable must not be null!");
- return createInstance(table, projectedFields, sort, page);
+ return createInstance(this.table, this.projectedFields, this.criteria, this.sort, page);
}
- FetchSpec execute(String sql, BiFunction mappingFunction) {
-
- Function selectFunction = it -> {
-
- if (logger.isDebugEnabled()) {
- logger.debug("Executing SQL statement [" + sql + "]");
- }
-
- return it.createStatement(sql);
- };
+ FetchSpec execute(PreparedOperation> preparedOperation, BiFunction mappingFunction) {
+ String sql = getRequiredSql(preparedOperation);
+ Function selectFunction = wrapPreparedOperation(sql, preparedOperation);
Function> resultFunction = it -> Flux.from(selectFunction.apply(it).execute());
return new DefaultSqlResult<>(DefaultDatabaseClient.this, //
@@ -681,14 +694,14 @@ FetchSpec execute(String sql, BiFunction mappingFunc
mappingFunction);
}
- protected abstract DefaultSelectSpecSupport createInstance(String table, List projectedFields, Sort sort,
- Pageable page);
+ protected abstract DefaultSelectSpecSupport createInstance(String table, List projectedFields,
+ Criteria criteria, Sort sort, Pageable page);
}
private class DefaultGenericSelectSpec extends DefaultSelectSpecSupport implements GenericSelectSpec {
- DefaultGenericSelectSpec(String table, List projectedFields, Sort sort, Pageable page) {
- super(table, projectedFields, sort, page);
+ DefaultGenericSelectSpec(String table, List projectedFields, Criteria criteria, Sort sort, Pageable page) {
+ super(table, projectedFields, criteria, sort, page);
}
DefaultGenericSelectSpec(String table) {
@@ -700,8 +713,8 @@ public TypedSelectSpec as(Class resultType) {
Assert.notNull(resultType, "Result type must not be null!");
- return new DefaultTypedSelectSpec<>(table, projectedFields, sort, page, resultType,
- dataAccessStrategy.getRowMapper(resultType));
+ return new DefaultTypedSelectSpec<>(this.table, this.projectedFields, this.criteria, this.sort, this.page,
+ resultType, dataAccessStrategy.getRowMapper(resultType));
}
@Override
@@ -717,14 +730,19 @@ public DefaultGenericSelectSpec project(String... selectedFields) {
return (DefaultGenericSelectSpec) super.project(selectedFields);
}
+ @Override
+ public DefaultGenericSelectSpec matching(Criteria criteria) {
+ return (DefaultGenericSelectSpec) super.where(criteria);
+ }
+
@Override
public DefaultGenericSelectSpec orderBy(Sort sort) {
return (DefaultGenericSelectSpec) super.orderBy(sort);
}
@Override
- public DefaultGenericSelectSpec page(Pageable page) {
- return (DefaultGenericSelectSpec) super.page(page);
+ public DefaultGenericSelectSpec page(Pageable pageable) {
+ return (DefaultGenericSelectSpec) super.page(pageable);
}
@Override
@@ -734,15 +752,23 @@ public FetchSpec> fetch() {
private FetchSpec exchange(BiFunction mappingFunction) {
- String select = dataAccessStrategy.select(table, new LinkedHashSet<>(this.projectedFields), sort, page);
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper();
+
+ StatementMapper.SelectSpec selectSpec = mapper.createSelect(this.table).withProjection(this.projectedFields)
+ .withSort(this.sort).withPage(this.page);
+
+ if (this.criteria != null) {
+ selectSpec = selectSpec.withCriteria(this.criteria);
+ }
- return execute(select, mappingFunction);
+ PreparedOperation> operation = mapper.getMappedObject(selectSpec);
+ return execute(operation, mappingFunction);
}
@Override
- protected DefaultGenericSelectSpec createInstance(String table, List projectedFields, Sort sort,
- Pageable page) {
- return new DefaultGenericSelectSpec(table, projectedFields, sort, page);
+ protected DefaultGenericSelectSpec createInstance(String table, List projectedFields, Criteria criteria,
+ Sort sort, Pageable page) {
+ return new DefaultGenericSelectSpec(table, projectedFields, criteria, sort, page);
}
}
@@ -763,14 +789,14 @@ private class DefaultTypedSelectSpec extends DefaultSelectSpecSupport impleme
this.mappingFunction = dataAccessStrategy.getRowMapper(typeToRead);
}
- DefaultTypedSelectSpec(String table, List projectedFields, Sort sort, Pageable page,
+ DefaultTypedSelectSpec(String table, List projectedFields, Criteria criteria, Sort sort, Pageable page,
BiFunction mappingFunction) {
- this(table, projectedFields, sort, page, null, mappingFunction);
+ this(table, projectedFields, criteria, sort, page, null, mappingFunction);
}
- DefaultTypedSelectSpec(String table, List projectedFields, Sort sort, Pageable page, Class typeToRead,
- BiFunction mappingFunction) {
- super(table, projectedFields, sort, page);
+ DefaultTypedSelectSpec(String table, List projectedFields, Criteria criteria, Sort sort, Pageable page,
+ Class typeToRead, BiFunction mappingFunction) {
+ super(table, projectedFields, criteria, sort, page);
this.typeToRead = typeToRead;
this.mappingFunction = mappingFunction;
}
@@ -796,40 +822,54 @@ public DefaultTypedSelectSpec project(String... selectedFields) {
return (DefaultTypedSelectSpec) super.project(selectedFields);
}
+ @Override
+ public DefaultTypedSelectSpec matching(Criteria criteria) {
+ return (DefaultTypedSelectSpec) super.where(criteria);
+ }
+
@Override
public DefaultTypedSelectSpec orderBy(Sort sort) {
return (DefaultTypedSelectSpec) super.orderBy(sort);
}
@Override
- public DefaultTypedSelectSpec page(Pageable page) {
- return (DefaultTypedSelectSpec) super.page(page);
+ public DefaultTypedSelectSpec page(Pageable pageable) {
+ return (DefaultTypedSelectSpec) super.page(pageable);
}
@Override
public FetchSpec fetch() {
- return exchange(mappingFunction);
+ return exchange(this.mappingFunction);
}
private FetchSpec exchange(BiFunction mappingFunction) {
List columns;
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper().forType(this.typeToRead);
if (this.projectedFields.isEmpty()) {
- columns = dataAccessStrategy.getAllColumns(typeToRead);
+ columns = dataAccessStrategy.getAllColumns(this.typeToRead);
} else {
columns = this.projectedFields;
}
- String select = dataAccessStrategy.select(table, new LinkedHashSet<>(columns), sort, page);
+ StatementMapper.SelectSpec selectSpec = mapper.createSelect(this.table).withProjection(columns)
+ .withPage(this.page).withSort(this.sort);
+
+ if (this.criteria != null) {
+ selectSpec = selectSpec.withCriteria(this.criteria);
+ }
+
+ PreparedOperation> operation = mapper.getMappedObject(selectSpec);
- return execute(select, mappingFunction);
+ return execute(operation, mappingFunction);
}
@Override
- protected DefaultTypedSelectSpec<T> createInstance(String table, List<String> projectedFields, Sort sort,
- Pageable page) {
- return new DefaultTypedSelectSpec<>(table, projectedFields, sort, page, typeToRead, mappingFunction);
+ protected DefaultTypedSelectSpec<T> createInstance(String table, List<String> projectedFields, Criteria criteria,
+ Sort sort, Pageable page) {
+ return new DefaultTypedSelectSpec<>(table, projectedFields, criteria, sort, page, this.typeToRead,
+ this.mappingFunction);
}
}
@@ -867,18 +907,22 @@ public GenericInsertSpec value(String field, Object value) {
() -> String.format("Value for field %s must not be null. Use nullValue(…) instead.", field));
Map<String, SettableValue> byName = new LinkedHashMap<>(this.byName);
- byName.put(field, SettableValue.fromOrEmpty(value, value.getClass()));
+ if (value instanceof SettableValue) {
+ byName.put(field, (SettableValue) value);
+ } else {
+ byName.put(field, SettableValue.fromOrEmpty(value, value.getClass()));
+ }
return new DefaultGenericInsertSpec<>(this.table, byName, this.mappingFunction);
}
@Override
- public GenericInsertSpec<T> nullValue(String field, Class<?> type) {
+ public GenericInsertSpec<T> nullValue(String field) {
Assert.notNull(field, "Field must not be null!");
Map<String, SettableValue> byName = new LinkedHashMap<>(this.byName);
- byName.put(field, SettableValue.empty(type));
+ byName.put(field, null);
return new DefaultGenericInsertSpec<>(this.table, byName, this.mappingFunction);
}
@@ -903,30 +947,19 @@ public Mono then() {
private FetchSpec exchange(BiFunction mappingFunction) {
- if (byName.isEmpty()) {
+ if (this.byName.isEmpty()) {
throw new IllegalStateException("Insert fields is empty!");
}
- PreparedOperation operation = dataAccessStrategy.getStatements().insert(table, Collections.emptyList(),
- it -> {
- byName.forEach(it::bind);
- });
-
- String sql = getRequiredSql(operation);
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper();
+ StatementMapper.InsertSpec insert = mapper.createInsert(this.table);
- Function> resultFunction = it -> {
-
- Statement statement = it.createStatement(sql);
- operation.bindTo(new StatementWrapper(statement));
-
- return Flux.from(statement.execute());
- };
+ for (String column : this.byName.keySet()) {
+ insert = insert.withColumn(column, this.byName.get(column));
+ }
- return new DefaultSqlResult<>(DefaultDatabaseClient.this, //
- sql, //
- resultFunction, //
- it -> resultFunction.apply(it).flatMap(Result::getRowsUpdated).next(), //
- mappingFunction);
+ PreparedOperation<?> operation = mapper.getMappedObject(insert);
+ return exchangeInsert(mappingFunction, operation);
}
}
@@ -954,7 +987,7 @@ public TypedInsertSpec table(String tableName) {
Assert.hasText(tableName, "Table name must not be null or empty!");
- return new DefaultTypedInsertSpec<>(typeToInsert, tableName, objectToInsert, this.mappingFunction);
+ return new DefaultTypedInsertSpec<>(this.typeToInsert, tableName, this.objectToInsert, this.mappingFunction);
}
@Override
@@ -962,7 +995,8 @@ public InsertSpec using(T objectToInsert) {
Assert.notNull(objectToInsert, "Object to insert must not be null!");
- return new DefaultTypedInsertSpec<>(typeToInsert, table, Mono.just(objectToInsert), this.mappingFunction);
+ return new DefaultTypedInsertSpec<>(this.typeToInsert, this.table, Mono.just(objectToInsert),
+ this.mappingFunction);
}
@Override
@@ -970,7 +1004,7 @@ public InsertSpec using(Publisher objectToInsert) {
Assert.notNull(objectToInsert, "Publisher to insert must not be null!");
- return new DefaultTypedInsertSpec<>(typeToInsert, table, objectToInsert, this.mappingFunction);
+ return new DefaultTypedInsertSpec<>(this.typeToInsert, this.table, objectToInsert, this.mappingFunction);
}
@Override
@@ -988,7 +1022,7 @@ public FetchSpec fetch() {
@Override
public Mono then() {
- return Mono.from(objectToInsert).flatMapMany(toInsert -> exchange(toInsert, (row, md) -> row).all()).then();
+ return Mono.from(this.objectToInsert).flatMapMany(toInsert -> exchange(toInsert, (row, md) -> row).all()).then();
}
private FetchSpec exchange(BiFunction mappingFunction) {
@@ -1021,35 +1055,299 @@ private FetchSpec exchange(Object toInsert, BiFunction operation = dataAccessStrategy.getStatements().insert(table, Collections.emptyList(),
- it -> {
- outboundRow.forEach((k, v) -> {
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper();
+ StatementMapper.InsertSpec insert = mapper.createInsert(this.table);
+
+ for (String column : outboundRow.keySet()) {
+ SettableValue settableValue = outboundRow.get(column);
+ if (settableValue.hasValue()) {
+ insert = insert.withColumn(column, settableValue);
+ }
+ }
+
+ PreparedOperation<?> operation = mapper.getMappedObject(insert);
+ return exchangeInsert(mappingFunction, operation);
+ }
+ }
+
+ /**
+ * Default {@link DatabaseClient.UpdateTableSpec} implementation.
+ */
+ class DefaultUpdateTableSpec implements UpdateTableSpec {
- if (v.hasValue()) {
- it.bind(k, v);
- }
- });
- });
+ @Override
+ public GenericUpdateSpec table(String table) {
+ return new DefaultGenericUpdateSpec(null, table, null, null);
+ }
- String sql = getRequiredSql(operation);
- Function> resultFunction = it -> {
+ @Override
+ public <T> TypedUpdateSpec<T> table(Class<T> table) {
+ return new DefaultTypedUpdateSpec<>(table, null, null);
+ }
+ }
- Statement statement = it.createStatement(sql);
- operation.bindTo(new StatementWrapper(statement));
- statement.returnGeneratedValues();
+ @RequiredArgsConstructor
+ class DefaultGenericUpdateSpec implements GenericUpdateSpec, UpdateMatchingSpec {
- return Flux.from(statement.execute());
- };
+ private final @Nullable Class> typeToUpdate;
+ private final @Nullable String table;
+ private final Update assignments;
+ private final Criteria where;
- return new DefaultSqlResult<>(DefaultDatabaseClient.this, //
- sql, //
- resultFunction, //
- it -> resultFunction //
- .apply(it) //
- .flatMap(Result::getRowsUpdated) //
- .collect(Collectors.summingInt(Integer::intValue)), //
- mappingFunction);
+ @Override
+ public UpdateMatchingSpec using(Update update) {
+
+ Assert.notNull(update, "Update must not be null");
+
+ return new DefaultGenericUpdateSpec(this.typeToUpdate, this.table, update, this.where);
+ }
+
+ @Override
+ public UpdateSpec matching(Criteria criteria) {
+
+ Assert.notNull(criteria, "Criteria must not be null");
+
+ return new DefaultGenericUpdateSpec(this.typeToUpdate, this.table, this.assignments, criteria);
+ }
+
+ @Override
+ public UpdatedRowsFetchSpec fetch() {
+
+ String table;
+
+ if (StringUtils.isEmpty(this.table)) {
+ table = dataAccessStrategy.getTableName(this.typeToUpdate);
+ } else {
+ table = this.table;
+ }
+
+ return exchange(table);
+ }
+
+ @Override
+ public Mono then() {
+ return fetch().rowsUpdated().then();
+ }
+
+ private UpdatedRowsFetchSpec exchange(String table) {
+
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper();
+
+ if (this.typeToUpdate != null) {
+ mapper = mapper.forType(this.typeToUpdate);
+ }
+
+ StatementMapper.UpdateSpec update = mapper.createUpdate(table, this.assignments);
+
+ if (this.where != null) {
+ update = update.withCriteria(this.where);
+ }
+
+ PreparedOperation<?> operation = mapper.getMappedObject(update);
+
+ return exchangeUpdate(operation);
+ }
+ }
+
+ @RequiredArgsConstructor
+ class DefaultTypedUpdateSpec<T> implements TypedUpdateSpec<T>, UpdateSpec {
+
+ private final @Nullable Class<T> typeToUpdate;
+ private final @Nullable String table;
+ private final T objectToUpdate;
+
+ @Override
+ public UpdateSpec using(T objectToUpdate) {
+
+ Assert.notNull(objectToUpdate, "Object to update must not be null");
+
+ return new DefaultTypedUpdateSpec<>(this.typeToUpdate, this.table, objectToUpdate);
+ }
+
+ @Override
+ public TypedUpdateSpec<T> table(String tableName) {
+
+ Assert.hasText(tableName, "Table name must not be null or empty!");
+
+ return new DefaultTypedUpdateSpec<>(this.typeToUpdate, tableName, this.objectToUpdate);
+ }
+
+ @Override
+ public UpdatedRowsFetchSpec fetch() {
+
+ String table;
+
+ if (StringUtils.isEmpty(this.table)) {
+ table = dataAccessStrategy.getTableName(this.typeToUpdate);
+ } else {
+ table = this.table;
+ }
+
+ return exchange(table);
+ }
+
+ @Override
+ public Mono then() {
+ return fetch().rowsUpdated().then();
+ }
+
+ private UpdatedRowsFetchSpec exchange(String table) {
+
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper();
+ Map<String, SettableValue> columns = dataAccessStrategy.getOutboundRow(this.objectToUpdate);
+ List<String> ids = dataAccessStrategy.getIdentifierColumns(this.typeToUpdate);
+
+ if (ids.isEmpty()) {
+ throw new IllegalStateException("No identifier columns in " + this.typeToUpdate.getName() + "!");
+ }
+ Object id = columns.remove(ids.get(0)); // do not update the Id column.
+
+ Update update = null;
+
+ for (String column : columns.keySet()) {
+ if (update == null) {
+ update = Update.update(column, columns.get(column));
+ } else {
+ update = update.set(column, columns.get(column));
+ }
+ }
+
+ PreparedOperation<?> operation = mapper
+ .getMappedObject(mapper.createUpdate(table, update).withCriteria(Criteria.where(ids.get(0)).is(id)));
+
+ return exchangeUpdate(operation);
+ }
+ }
+
+ /**
+ * Default {@link DatabaseClient.DeleteFromSpec} implementation.
+ */
+ class DefaultDeleteFromSpec implements DeleteFromSpec {
+
+ @Override
+ public DefaultDeleteSpec<?> from(String table) {
+ return new DefaultDeleteSpec<>(null, table, null);
+ }
+
+ @Override
+ public <T> DefaultDeleteSpec<T> from(Class<T> table) {
+ return new DefaultDeleteSpec<>(table, null, null);
+ }
+ }
+
+ /**
+ * Default implementation of {@link DatabaseClient.TypedInsertSpec}.
+ */
+ @RequiredArgsConstructor
+ class DefaultDeleteSpec<T> implements DeleteMatchingSpec, TypedDeleteSpec<T> {
+
+ private final @Nullable Class<T> typeToDelete;
+ private final @Nullable String table;
+ private final Criteria where;
+
+ @Override
+ public DeleteSpec matching(Criteria criteria) {
+
+ Assert.notNull(criteria, "Criteria must not be null!");
+
+ return new DefaultDeleteSpec<>(this.typeToDelete, this.table, criteria);
+ }
+
+ @Override
+ public TypedDeleteSpec<T> table(String tableName) {
+
+ Assert.hasText(tableName, "Table name must not be null or empty!");
+
+ return new DefaultDeleteSpec<>(this.typeToDelete, tableName, this.where);
+ }
+
+ @Override
+ public UpdatedRowsFetchSpec fetch() {
+
+ String table;
+
+ if (StringUtils.isEmpty(this.table)) {
+ table = dataAccessStrategy.getTableName(this.typeToDelete);
+ } else {
+ table = this.table;
+ }
+
+ return exchange(table);
}
+
+ @Override
+ public Mono then() {
+ return fetch().rowsUpdated().then();
+ }
+
+ private UpdatedRowsFetchSpec exchange(String table) {
+
+ StatementMapper mapper = dataAccessStrategy.getStatementMapper();
+
+ if (this.typeToDelete != null) {
+ mapper = mapper.forType(this.typeToDelete);
+ }
+
+ StatementMapper.DeleteSpec delete = mapper.createDelete(table);
+
+ if (this.where != null) {
+ delete = delete.withCriteria(this.where);
+ }
+
+ PreparedOperation<?> operation = mapper.getMappedObject(delete);
+
+ return exchangeUpdate(operation);
+ }
+ }
+
+ private <T> FetchSpec<T> exchangeInsert(BiFunction<Row, RowMetadata, T> mappingFunction,
+ PreparedOperation<?> operation) {
+
+ String sql = getRequiredSql(operation);
+ Function<Connection, Statement> insertFunction = wrapPreparedOperation(sql, operation)
+ .andThen(statement -> statement.returnGeneratedValues());
+ Function<Connection, Flux<Result>> resultFunction = it -> Flux.from(insertFunction.apply(it).execute());
+
+ return new DefaultSqlResult<>(this, //
+ sql, //
+ resultFunction, //
+ it -> sumRowsUpdated(resultFunction, it), //
+ mappingFunction);
+ }
+
+ private UpdatedRowsFetchSpec exchangeUpdate(PreparedOperation<?> operation) {
+
+ String sql = getRequiredSql(operation);
+ Function<Connection, Statement> executeFunction = wrapPreparedOperation(sql, operation);
+ Function<Connection, Flux<Result>> resultFunction = it -> Flux.from(executeFunction.apply(it).execute());
+
+ return new DefaultSqlResult<>(this, //
+ sql, //
+ resultFunction, //
+ it -> sumRowsUpdated(resultFunction, it), //
+ (row, rowMetadata) -> rowMetadata);
+ }
+
+ private static Mono<Integer> sumRowsUpdated(Function<Connection, Flux<Result>> resultFunction, Connection it) {
+
+ return resultFunction.apply(it) //
+ .flatMap(Result::getRowsUpdated) //
+ .collect(Collectors.summingInt(Integer::intValue));
+ }
+
+ private Function<Connection, Statement> wrapPreparedOperation(String sql, PreparedOperation<?> operation) {
+
+ return it -> {
+
+ if (this.logger.isDebugEnabled()) {
+ this.logger.debug("Executing SQL statement [" + sql + "]");
+ }
+
+ Statement statement = it.createStatement(sql);
+ operation.bindTo(new StatementWrapper(statement));
+
+ return statement;
+ };
}
private static Flux doInConnectionMany(Connection connection, Function> action) {
@@ -1164,7 +1462,7 @@ Mono close() {
return Mono.defer(() -> {
if (compareAndSet(false, true)) {
- return Mono.from(closeFunction.apply(connection));
+ return Mono.from(this.closeFunction.apply(this.connection));
}
return Mono.empty();
diff --git a/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java b/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java
index 17f3ddbd..a7b7af1b 100644
--- a/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java
+++ b/src/main/java/org/springframework/data/r2dbc/function/DefaultReactiveDataAccessStrategy.java
@@ -19,19 +19,13 @@
import io.r2dbc.spi.RowMetadata;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import java.util.OptionalLong;
-import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.data.convert.CustomConversions.StoreConversions;
-import org.springframework.data.domain.Pageable;
-import org.springframework.data.domain.Sort;
-import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.r2dbc.dialect.ArrayColumns;
import org.springframework.data.r2dbc.dialect.BindMarkersFactory;
@@ -42,16 +36,11 @@
import org.springframework.data.r2dbc.function.convert.MappingR2dbcConverter;
import org.springframework.data.r2dbc.function.convert.R2dbcConverter;
import org.springframework.data.r2dbc.function.convert.R2dbcCustomConversions;
-import org.springframework.data.r2dbc.support.StatementRenderUtil;
+import org.springframework.data.r2dbc.function.query.UpdateMapper;
import org.springframework.data.relational.core.mapping.RelationalMappingContext;
import org.springframework.data.relational.core.mapping.RelationalPersistentEntity;
import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
-import org.springframework.data.relational.core.sql.Expression;
-import org.springframework.data.relational.core.sql.OrderByField;
import org.springframework.data.relational.core.sql.Select;
-import org.springframework.data.relational.core.sql.SelectBuilder.SelectFromAndOrderBy;
-import org.springframework.data.relational.core.sql.StatementBuilder;
-import org.springframework.data.relational.core.sql.Table;
import org.springframework.data.relational.core.sql.render.NamingStrategies;
import org.springframework.data.relational.core.sql.render.RenderContext;
import org.springframework.data.relational.core.sql.render.RenderNamingStrategy;
@@ -69,8 +58,9 @@ public class DefaultReactiveDataAccessStrategy implements ReactiveDataAccessStra
private final Dialect dialect;
private final R2dbcConverter converter;
+ private final UpdateMapper updateMapper;
private final MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> mappingContext;
- private final StatementFactory statements;
+ private final StatementMapper statementMapper;
/**
* Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link Dialect}.
@@ -94,14 +84,6 @@ private static R2dbcConverter createConverter(Dialect dialect) {
return new MappingR2dbcConverter(context, customConversions);
}
- public R2dbcConverter getConverter() {
- return converter;
- }
-
- public MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> getMappingContext() {
- return mappingContext;
- }
-
/**
* Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link Dialect} and {@link R2dbcConverter}.
*
@@ -115,6 +97,7 @@ public DefaultReactiveDataAccessStrategy(Dialect dialect, R2dbcConverter convert
Assert.notNull(converter, "RelationalConverter must not be null");
this.converter = converter;
+ this.updateMapper = new UpdateMapper(converter);
this.mappingContext = (MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty>) this.converter
.getMappingContext();
this.dialect = dialect;
@@ -141,17 +124,17 @@ public SelectRenderContext getSelect() {
}
};
- this.statements = new DefaultStatementFactory(this.dialect, renderContext);
+ this.statementMapper = new DefaultStatementMapper(dialect, renderContext, this.updateMapper, this.mappingContext);
}
/*
* (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getAllFields(java.lang.Class)
+ * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getAllColumns(java.lang.Class)
*/
@Override
- public List<String> getAllColumns(Class<?> typeToRead) {
+ public List<String> getAllColumns(Class<?> entityType) {
- RelationalPersistentEntity<?> persistentEntity = getPersistentEntity(typeToRead);
+ RelationalPersistentEntity<?> persistentEntity = getPersistentEntity(entityType);
if (persistentEntity == null) {
return Collections.singletonList("*");
@@ -165,6 +148,26 @@ public List getAllColumns(Class> typeToRead) {
return columnNames;
}
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getIdentifierColumns(java.lang.Class)
+ */
+ @Override
+ public List<String> getIdentifierColumns(Class<?> entityType) {
+
+ RelationalPersistentEntity<?> persistentEntity = getRequiredPersistentEntity(entityType);
+
+ List<String> columnNames = new ArrayList<>();
+ for (RelationalPersistentProperty property : persistentEntity) {
+
+ if (property.isIdProperty()) {
+ columnNames.add(property.getColumnName());
+ }
+ }
+
+ return columnNames;
+ }
+
/*
* (non-Javadoc)
* @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getOutboundRow(java.lang.Object)
@@ -175,7 +178,7 @@ public OutboundRow getOutboundRow(Object object) {
OutboundRow row = new OutboundRow();
- converter.write(object, row);
+ this.converter.write(object, row);
RelationalPersistentEntity> entity = getRequiredPersistentEntity(ClassUtils.getUserClass(object));
@@ -198,53 +201,25 @@ private boolean shouldConvertArrayValue(RelationalPersistentProperty property, S
private SettableValue getArrayValue(SettableValue value, RelationalPersistentProperty property) {
- ArrayColumns arrayColumns = dialect.getArraySupport();
+ ArrayColumns arrayColumns = this.dialect.getArraySupport();
if (!arrayColumns.isSupported()) {
throw new InvalidDataAccessResourceUsageException(
- "Dialect " + dialect.getClass().getName() + " does not support array columns");
+ "Dialect " + this.dialect.getClass().getName() + " does not support array columns");
}
- return SettableValue.fromOrEmpty(converter.getArrayValue(arrayColumns, property, value.getValue()),
+ return SettableValue.fromOrEmpty(this.converter.getArrayValue(arrayColumns, property, value.getValue()),
property.getActualType());
}
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getMappedSort(java.lang.Class, org.springframework.data.domain.Sort)
- */
- @Override
- public Sort getMappedSort(Class> typeToRead, Sort sort) {
-
- RelationalPersistentEntity> entity = getPersistentEntity(typeToRead);
- if (entity == null) {
- return sort;
- }
-
- List mappedOrder = new ArrayList<>();
-
- for (Order order : sort) {
-
- RelationalPersistentProperty persistentProperty = entity.getPersistentProperty(order.getProperty());
- if (persistentProperty == null) {
- mappedOrder.add(order);
- } else {
- mappedOrder
- .add(Order.by(persistentProperty.getColumnName()).with(order.getNullHandling()).with(order.getDirection()));
- }
- }
-
- return Sort.by(mappedOrder);
- }
-
/*
* (non-Javadoc)
* @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getRowMapper(java.lang.Class)
*/
@Override
public <T> BiFunction<Row, RowMetadata, T> getRowMapper(Class<T> typeToRead) {
- return new EntityRowMapper<>(typeToRead, converter);
+ return new EntityRowMapper<>(typeToRead, this.converter);
}
/*
@@ -258,11 +233,11 @@ public String getTableName(Class> type) {
/*
* (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getStatements()
+ * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getStatementMapper()
*/
@Override
- public StatementFactory getStatements() {
- return this.statements;
+ public StatementMapper getStatementMapper() {
+ return this.statementMapper;
}
/*
@@ -271,67 +246,27 @@ public StatementFactory getStatements() {
*/
@Override
public BindMarkersFactory getBindMarkersFactory() {
- return dialect.getBindMarkersFactory();
- }
-
- private RelationalPersistentEntity> getRequiredPersistentEntity(Class> typeToRead) {
- return mappingContext.getRequiredPersistentEntity(typeToRead);
- }
-
- @Nullable
- private RelationalPersistentEntity> getPersistentEntity(Class> typeToRead) {
- return mappingContext.getPersistentEntity(typeToRead);
+ return this.dialect.getBindMarkersFactory();
}
/*
* (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#select(java.lang.String, java.util.Set, org.springframework.data.domain.Sort, org.springframework.data.domain.Pageable)
+ * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getConverter()
*/
- @Override
- public String select(String tableName, Set columns, Sort sort, Pageable page) {
-
- Table table = Table.create(tableName);
-
- Collection extends Expression> selectList;
-
- if (columns.isEmpty()) {
- selectList = Collections.singletonList(table.asterisk());
- } else {
- selectList = table.columns(columns);
- }
-
- SelectFromAndOrderBy selectBuilder = StatementBuilder //
- .select(selectList) //
- .from(tableName) //
- .orderBy(createOrderByFields(table, sort));
-
- OptionalLong limit = OptionalLong.empty();
- OptionalLong offset = OptionalLong.empty();
-
- if (page.isPaged()) {
- limit = OptionalLong.of(page.getPageSize());
- offset = OptionalLong.of(page.getOffset());
- }
-
- // See https://github.com/spring-projects/spring-data-r2dbc/issues/55
- return StatementRenderUtil.render(selectBuilder.build(), limit, offset, this.dialect);
+ public R2dbcConverter getConverter() {
+ return this.converter;
}
- private Collection extends OrderByField> createOrderByFields(Table table, Sort sortToUse) {
-
- List fields = new ArrayList<>();
-
- for (Order order : sortToUse) {
-
- OrderByField orderByField = OrderByField.from(table.column(order.getProperty()));
+ public MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> getMappingContext() {
+ return this.mappingContext;
+ }
- if (order.getDirection() != null) {
- fields.add(order.isAscending() ? orderByField.asc() : orderByField.desc());
- } else {
- fields.add(orderByField);
- }
- }
+ private RelationalPersistentEntity<?> getRequiredPersistentEntity(Class<?> typeToRead) {
+ return this.mappingContext.getRequiredPersistentEntity(typeToRead);
+ }
- return fields;
+ @Nullable
+ private RelationalPersistentEntity<?> getPersistentEntity(Class<?> typeToRead) {
+ return this.mappingContext.getPersistentEntity(typeToRead);
}
}
diff --git a/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java b/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java
index 980a6ce4..5e713db9 100644
--- a/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java
+++ b/src/main/java/org/springframework/data/r2dbc/function/DefaultStatementFactory.java
@@ -15,455 +15,21 @@
*/
package org.springframework.data.r2dbc.function;
-import io.r2dbc.spi.Statement;
-import lombok.Getter;
import lombok.RequiredArgsConstructor;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.BiConsumer;
-import java.util.function.BiFunction;
-import java.util.function.Consumer;
-
-import org.springframework.dao.InvalidDataAccessApiUsageException;
-import org.springframework.data.r2dbc.dialect.BindMarker;
-import org.springframework.data.r2dbc.dialect.BindMarkers;
import org.springframework.data.r2dbc.dialect.Dialect;
-import org.springframework.data.r2dbc.domain.BindTarget;
-import org.springframework.data.r2dbc.domain.PreparedOperation;
-import org.springframework.data.r2dbc.domain.SettableValue;
-import org.springframework.data.relational.core.sql.AssignValue;
-import org.springframework.data.relational.core.sql.Assignment;
-import org.springframework.data.relational.core.sql.Column;
-import org.springframework.data.relational.core.sql.Condition;
-import org.springframework.data.relational.core.sql.Delete;
-import org.springframework.data.relational.core.sql.DeleteBuilder;
-import org.springframework.data.relational.core.sql.Expression;
-import org.springframework.data.relational.core.sql.Insert;
-import org.springframework.data.relational.core.sql.SQL;
-import org.springframework.data.relational.core.sql.Select;
-import org.springframework.data.relational.core.sql.SelectBuilder;
-import org.springframework.data.relational.core.sql.StatementBuilder;
-import org.springframework.data.relational.core.sql.Table;
-import org.springframework.data.relational.core.sql.Update;
-import org.springframework.data.relational.core.sql.UpdateBuilder;
import org.springframework.data.relational.core.sql.render.RenderContext;
-import org.springframework.data.relational.core.sql.render.SqlRenderer;
-import org.springframework.lang.Nullable;
-import org.springframework.util.Assert;
/**
* Default {@link StatementFactory} implementation.
*
* @author Mark Paluch
*/
+// TODO: Move DefaultPreparedOperation et al to a better place. Probably StatementMapper.
@RequiredArgsConstructor
-class DefaultStatementFactory implements StatementFactory {
+class DefaultStatementFactory {
private final Dialect dialect;
private final RenderContext renderContext;
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.StatementFactory#select(java.lang.String, java.util.Collection, java.util.function.Consumer)
- */
- @Override
- public PreparedOperation select(String tableName, Collection columnNames,
- Consumer binderConsumer) {
-
- Assert.hasText(tableName, "Table must not be empty");
- Assert.notEmpty(columnNames, "Columns must not be empty");
- Assert.notNull(binderConsumer, "Binder Consumer must not be null");
-
- DefaultBinderBuilder binderBuilder = new DefaultBinderBuilder() {
- @Override
- public void bind(String identifier, SettableValue settable) {
- throw new InvalidDataAccessApiUsageException("Binding for SELECT not supported. Use filterBy(…)");
- }
- };
-
- binderConsumer.accept(binderBuilder);
-
- return withDialect((dialect, renderContext) -> {
- Table table = Table.create(tableName);
- List columns = table.columns(columnNames);
- SelectBuilder.SelectFromAndJoin selectBuilder = StatementBuilder.select(columns).from(table);
-
- BindMarkers bindMarkers = dialect.getBindMarkersFactory().create();
- Binding binding = binderBuilder.build(table, bindMarkers);
- Select select;
-
- if (binding.hasCondition()) {
- select = selectBuilder.where(binding.getCondition()).build();
- } else {
- select = selectBuilder.build();
- }
-
- return new DefaultPreparedOperation<>( //
- select, //
- renderContext, //
- binding //
- );
- });
- }
-
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.StatementFactory#insert(java.lang.String, java.util.Collection, java.util.function.Consumer)
- */
- @Override
- public PreparedOperation insert(String tableName, Collection generatedKeysNames,
- Consumer binderConsumer) {
-
- Assert.hasText(tableName, "Table must not be empty");
- Assert.notNull(generatedKeysNames, "Generated key names must not be null");
- Assert.notNull(binderConsumer, "Binder Consumer must not be null");
-
- DefaultBinderBuilder binderBuilder = new DefaultBinderBuilder() {
- @Override
- public void filterBy(String identifier, SettableValue settable) {
- throw new InvalidDataAccessApiUsageException("Filter-Binding for INSERT not supported. Use bind(…)");
- }
- };
-
- binderConsumer.accept(binderBuilder);
-
- return withDialect((dialect, renderContext) -> {
-
- BindMarkers bindMarkers = dialect.getBindMarkersFactory().create();
- Table table = Table.create(tableName);
-
- Map expressionBindings = new LinkedHashMap<>();
- List expressions = new ArrayList<>();
- binderBuilder.forEachBinding((column, settableValue) -> {
-
- BindMarker bindMarker = bindMarkers.next(column);
-
- expressions.add(SQL.bindMarker(bindMarker.getPlaceholder()));
- expressionBindings.put(bindMarker, settableValue);
- });
-
- if (expressions.isEmpty()) {
- throw new IllegalStateException("INSERT contains no value expressions");
- }
-
- Binding binding = binderBuilder.build(table, bindMarkers).withBindings(expressionBindings);
- Insert insert = StatementBuilder.insert().into(table).columns(table.columns(binderBuilder.bindings.keySet()))
- .values(expressions).build();
-
- return new DefaultPreparedOperation(insert, renderContext, binding);
- });
- }
-
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.StatementFactory#update(java.lang.String, java.util.function.Consumer)
- */
- @Override
- public PreparedOperation update(String tableName, Consumer binderConsumer) {
-
- Assert.hasText(tableName, "Table must not be empty");
- Assert.notNull(binderConsumer, "Binder Consumer must not be null");
-
- DefaultBinderBuilder binderBuilder = new DefaultBinderBuilder();
-
- binderConsumer.accept(binderBuilder);
-
- return withDialect((dialect, renderContext) -> {
-
- BindMarkers bindMarkers = dialect.getBindMarkersFactory().create();
- Table table = Table.create(tableName);
-
- Map assignmentBindings = new LinkedHashMap<>();
- List assignments = new ArrayList<>();
- binderBuilder.forEachBinding((column, settableValue) -> {
-
- BindMarker bindMarker = bindMarkers.next(column);
- AssignValue assignment = table.column(column).set(SQL.bindMarker(bindMarker.getPlaceholder()));
-
- assignments.add(assignment);
- assignmentBindings.put(bindMarker, settableValue);
- });
-
- if (assignments.isEmpty()) {
- throw new IllegalStateException("UPDATE contains no assignments");
- }
-
- UpdateBuilder.UpdateWhere updateBuilder = StatementBuilder.update(table).set(assignments);
-
- Binding binding = binderBuilder.build(table, bindMarkers).withBindings(assignmentBindings);
- Update update;
-
- if (binding.hasCondition()) {
- update = updateBuilder.where(binding.getCondition()).build();
- } else {
- update = updateBuilder.build();
- }
-
- return new DefaultPreparedOperation<>(update, renderContext, binding);
- });
- }
-
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.StatementFactory#delete(java.lang.String, java.util.function.Consumer)
- */
- @Override
- public PreparedOperation delete(String tableName, Consumer binderConsumer) {
-
- Assert.hasText(tableName, "Table must not be empty");
- Assert.notNull(binderConsumer, "Binder Consumer must not be null");
-
- DefaultBinderBuilder binderBuilder = new DefaultBinderBuilder() {
- @Override
- public void bind(String identifier, SettableValue settable) {
- throw new InvalidDataAccessApiUsageException("Binding for DELETE not supported. Use filterBy(…)");
- }
- };
-
- binderConsumer.accept(binderBuilder);
-
- return withDialect((dialect, renderContext) -> {
-
- Table table = Table.create(tableName);
- DeleteBuilder.DeleteWhere deleteBuilder = StatementBuilder.delete().from(table);
-
- BindMarkers bindMarkers = dialect.getBindMarkersFactory().create();
- Binding binding = binderBuilder.build(table, bindMarkers);
- Delete delete;
-
- if (binding.hasCondition()) {
- delete = deleteBuilder.where(binding.getCondition()).build();
- } else {
- delete = deleteBuilder.build();
- }
-
- return new DefaultPreparedOperation<>(delete, renderContext, binding);
- });
- }
-
- private T withDialect(BiFunction action) {
-
- Assert.notNull(action, "Action must not be null");
-
- return action.apply(this.dialect, this.renderContext);
- }
-
- /**
- * Default {@link StatementBinderBuilder} implementation.
- */
- static class DefaultBinderBuilder implements StatementBinderBuilder {
-
- final Map filters = new LinkedHashMap<>();
- final Map bindings = new LinkedHashMap<>();
-
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.StatementFactory.StatementBinderBuilder#filterBy(java.lang.String, org.springframework.data.r2dbc.domain.SettableValue)
- */
- @Override
- public void filterBy(String identifier, SettableValue settable) {
-
- Assert.hasText(identifier, "FilterBy identifier must not be empty");
- Assert.notNull(settable, "SettableValue for Filter must not be null");
- this.filters.put(identifier, settable);
- }
-
- /*
- * (non-Javadoc)
- * @see org.springframework.data.r2dbc.function.StatementFactory.StatementBinderBuilder#bind(java.lang.String, org.springframework.data.r2dbc.domain.SettableValue)
- */
- @Override
- public void bind(String identifier, SettableValue settable) {
-
- Assert.hasText(identifier, "Bind value identifier must not be empty");
- Assert.notNull(settable, "SettableValue must not be null");
-
- this.bindings.put(identifier, settable);
- }
-
- /**
- * Call {@link BiConsumer} for each filter binding.
- *
- * @param consumer the consumer to notify.
- */
- void forEachFilter(BiConsumer consumer) {
- filters.forEach(consumer);
- }
-
- /**
- * Call {@link BiConsumer} for each value binding.
- *
- * @param consumer the consumer to notify.
- */
- void forEachBinding(BiConsumer consumer) {
- bindings.forEach(consumer);
- }
-
- Binding build(Table table, BindMarkers bindMarkers) {
-
- Map values = new LinkedHashMap<>();
- Map