diff --git a/.gitignore b/.gitignore
index 06998cb..47c72c2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -400,3 +400,7 @@ FodyWeavers.xsd
# Test run files
src/*-tests.txt
+
+# Documentation builds and intermediate files
+_site/
+api/
diff --git a/bitbadger-doc.png b/bitbadger-doc.png
new file mode 100644
index 0000000..22b1fe2
Binary files /dev/null and b/bitbadger-doc.png differ
diff --git a/doc-template/public/main.css b/doc-template/public/main.css
new file mode 100644
index 0000000..cfa8c03
--- /dev/null
+++ b/doc-template/public/main.css
@@ -0,0 +1,4 @@
+article h2 {
+ border-bottom: solid 1px gray;
+ margin-bottom: 1rem;
+}
diff --git a/doc-template/public/main.js b/doc-template/public/main.js
new file mode 100644
index 0000000..e60362e
--- /dev/null
+++ b/doc-template/public/main.js
@@ -0,0 +1,10 @@
+export default {
+ defaultTheme: "auto",
+ iconLinks: [
+ {
+ icon: "git",
+ href: "https://git.bitbadger.solutions/bit-badger/BitBadger.Documents",
+ title: "Source Repository"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docfx.json b/docfx.json
new file mode 100644
index 0000000..b4ce50f
--- /dev/null
+++ b/docfx.json
@@ -0,0 +1,59 @@
+{
+ "$schema": "https://raw.githubusercontent.com/dotnet/docfx/main/schemas/docfx.schema.json",
+ "metadata": [
+ {
+ "src": [
+ {
+ "src": "./src",
+ "files": [
+ "Common/bin/Release/net9.0/*.dll",
+ "Postgres/bin/Release/net9.0/*.dll",
+ "Sqlite/bin/Release/net9.0/*.dll"
+ ]
+ }
+ ],
+ "dest": "api",
+ "properties": {
+ "TargetFramework": "net9.0"
+ }
+ }
+ ],
+ "build": {
+ "content": [
+ {
+ "files": [
+ "index.md",
+ "toc.yml",
+ "api/**/*.{md,yml}",
+ "docs/**/*.{md,yml}"
+ ],
+ "exclude": [
+ "_site/**"
+ ]
+ }
+ ],
+ "resource": [
+ {
+ "files": [
+ "bitbadger-doc.png",
+ "favicon.ico"
+ ]
+ }
+ ],
+ "output": "_site",
+ "template": [
+ "default",
+ "modern",
+ "doc-template"
+ ],
+ "globalMetadata": {
+ "_appName": "BitBadger.Documents",
+ "_appTitle": "BitBadger.Documents",
+ "_appLogoPath": "bitbadger-doc.png",
+ "_appFaviconPath": "favicon.ico",
+ "_appFooter": "Hand-crafted documentation created with docfx by Bit Badger Solutions",
+ "_enableSearch": true,
+ "pdf": false
+ }
+ }
+}
diff --git a/docs/advanced/custom-serialization.md b/docs/advanced/custom-serialization.md
new file mode 100644
index 0000000..b9e108c
--- /dev/null
+++ b/docs/advanced/custom-serialization.md
@@ -0,0 +1,38 @@
+# Custom Serialization
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+JSON documents are sent to and received from both PostgreSQL and SQLite as `string`s; the translation to and from your domain objects (commonly called POCOs) is handled via .NET. By default, the serializer used by the library is based on `System.Text.Json` with [converters for common F# types][fs].
+
+## Implementing a Custom Serializer
+
+`IDocumentSerializer` (found in the `BitBadger.Documents` namespace) specifies two methods. `Serialize` takes a `T` and returns a `string`; `Deserialize` takes a `string` and returns an instance of `T`. (These show as `'T` in F#.) While implementing those two methods is required, the custom implementation can use whatever library you desire, and contain converters for custom types.
+
+Once this serializer is implemented and constructed, provide it to the library:
+
+```csharp
+// C#
+var serializer = /* constructed serializer */;
+Configuration.UseSerializer(serializer);
+```
+
+```fsharp
+// F#
+let serializer = (* constructed serializer *)
+Configuration.useSerializer serializer
+```
+
+The biggest benefit to registering a serializer (apart from control) is that all JSON operations will use the same serializer. This is most important for PostgreSQL's JSON containment queries; the object you pass as the criteria will be translated properly before it is compared. However, "unstructured" data does not mean "inconsistently structured" data; if your application uses custom serialization, extending this to your documents ensures that the structure is internally consistent.
+
+## Uses for Custom Serialization
+
+- If you use a custom serializer (or serializer options) in your application, a custom serializer implementation can utilize these existing configuration options.
+- If you prefer [`Newtonsoft.Json`][nj], you can wrap `JsonConvert` or `JsonSerializer` calls in a custom converter. F# users may consider incorporating Microsoft's [`FSharpLu.Json`][fj] converter.
+- If your project uses [`NodaTime`][], your custom serializer could include its converters for `System.Text.Json` or `Newtonsoft.Json`.
+- If you use DDD to define custom types, you can implement converters to translate them to/from your preferred JSON representation.
+
+
+[fs]: https://github.com/Tarmil/FSharp.SystemTextJson "FSharp.SystemTextJson • GitHub"
+[nj]: https://www.newtonsoft.com/json "Json.NET"
+[fj]: https://github.com/microsoft/fsharplu/blob/main/FSharpLu.Json.md "FSharpLu.Json • GitHub"
+[`NodaTime`]: https://nodatime.org/ "NodaTime"
diff --git a/docs/advanced/index.md b/docs/advanced/index.md
new file mode 100644
index 0000000..270feb4
--- /dev/null
+++ b/docs/advanced/index.md
@@ -0,0 +1,16 @@
+# Advanced Usage
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+While the functions provided by the library cover lots of use cases, there are other times when applications need something else. Below are some of those.
+
+- [Customizing Serialization][ser]
+- [Related Documents and Custom Queries][rel]
+- [Transactions][txn]
+- [Referential Integrity with Documents][ref] (PostgreSQL only; conceptual)
+
+
+[ser]: ./custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents"
+[rel]: ./related.md "Advanced Usage: Related Documents • BitBadger.Documents"
+[txn]: ./transactions.md "Advanced Usage: Transactions • BitBadger.Documents"
+[ref]: /concepts/referential-integrity.html "Appendix: Referential Integrity with Documents • Concepts • Relational Documents"
diff --git a/docs/advanced/related.md b/docs/advanced/related.md
new file mode 100644
index 0000000..b75b50c
--- /dev/null
+++ b/docs/advanced/related.md
@@ -0,0 +1,379 @@
+# Related Documents and Custom Queries
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+_NOTE: This page is longer than the ideal documentation page. Understanding how to assemble custom queries requires understanding how data is stored, and the list of ways to retrieve information can be... a lot. The hope is that one reading will serve as education, and the lists of options will serve as reference lists that will assist you in crafting your queries._
+
+## Overview
+
+Document stores generally have fewer relationships than traditional relational databases, particularly those that arise when data is structured in [Third Normal Form][tnf]; related collections are stored in the document, and ever-increasing surrogate keys (_a la_ sequences and such) do not play well with distributed data. Unless all data is stored in a single document, though, there will still be a natural relation between documents.
+
+Thinking back to our earlier examples, we did not store the collection of rooms in each hotel's document; each room is its own document and contains the ID of the hotel as one of its properties.
+
+```csharp
+// C#
+public class Hotel
+{
+ public string Id { get; set; } = "";
+ // ... more properties
+}
+
+public class Room
+{
+ public string Id { get; set; } = "";
+ public string HotelId { get; set; } = "";
+ // ... more properties
+}
+```
+
+```fsharp
+// F#
+[<CLIMutable>]
+type Hotel =
+ { Id: string
+ // ... more fields
+ }
+
+[<CLIMutable>]
+type Room =
+ { Id: string
+ HotelId: string
+ // ... more fields
+ }
+```
+
+> The `CLIMutable` attribute is required on record types that are instantiated by the CLR; this attribute generates a zero-parameter constructor.
+
+## Document Table SQL in Depth
+
+The library creates tables with a `data` column of type `JSONB` (PostgreSQL) or `TEXT` (SQLite), with a unique index on the configured ID name that serves as the primary key (for these examples, we'll assume it's the default `Id`). The indexes created by the library all apply to the `data` column. The by-ID query for a hotel would be...
+
+```sql
+SELECT data FROM hotel WHERE data->>'Id' = @id
+```
+
+...with the ID passed as the `@id` parameter.
+
+> _Using a "building block" method/function `Query.WhereById` will create the `data->>'Id' = @id` criteria using [the configured ID name][id]._
+
+Finding all the rooms for a hotel, using our indexes we created earlier, could use a field comparison query...
+
+```sql
+SELECT data FROM room WHERE data->>'HotelId' = @field
+```
+
+...with `@field` being "abc123"; PostgreSQL could also use a JSON containment query...
+
+```sql
+SELECT data FROM room WHERE data @> @criteria
+```
+
+...with something like `new { HotelId = "abc123" }` passed as the matching document in the `@criteria` parameter.
+
+So far, so good; but, if we're looking up a room, we do not want to have to make 2 queries just to also be able to display the hotel's name. The `WHERE` clause on the first query above uses the expression `data->>'Id'`; this extracts a field from a JSON column as `TEXT` in PostgreSQL (or "best guess" in SQLite, but usually text). Since this is the value our unique index indexes, and we are using a relational database, we can write an efficient JOIN between these two tables.
+
+```sql
+SELECT r.data, h.data AS hotel_data
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+ WHERE r.data->>'Id' = @id
+```
+
+_(This syntax would work without the unique index; for PostgreSQL, it would default to using the GIN index (`Full` or `Optimized`), if it exists, but it wouldn't be quite as efficient as a zero-or-one unique index lookup. For SQLite, this would result in a full table scan. Both PostgreSQL and SQLite also support a `->` operator, which extracts the field as a JSON value instead of its text.)_
+
+## Using Building Blocks
+
+Most of the data access methods in both libraries are built up from query fragments and reusable functions; these are exposed for use in building custom queries.
+
+### Queries
+
+For every method or function described in [Basic Usage][], the `Query` static class/module contains the building blocks needed to construct a query for that operation. Both the parent and implementation namespaces have a `Query` module; in C#, you'll need to qualify the implementation module namespace.
+
+In `BitBadger.Documents.Query`, you'll find:
+- **StatementWhere** takes a SQL statement and a `WHERE` clause and puts them together on either side of the text ` WHERE `
+- **Definition** contains methods/functions to ensure tables, their keys, and field indexes exist.
+- **Insert**, **Save**, **Count**, **Find**, **Update**, and **Delete** are the prefixes of the queries for those actions; they all take a table name and return this query (with no `WHERE` clause)
+- **Exists** also requires a `WHERE` clause, due to how the query is constructed;
+  the condition is inserted as a subquery
+
+Within each implementation's `Query` module:
+- **WhereByFields** takes a `FieldMatch` case and a set of fields. `Field` has constructor functions for each comparison it supports; these functions generally take a field name and a value, though the latter two do not require a value.
+ - **Equal** uses `=` to create an equality comparison
+ - **Greater** uses `>` to create a greater-than comparison
+ - **GreaterOrEqual** uses `>=` to create a greater-than-or-equal-to comparison
+ - **Less** uses `<` to create a less-than comparison
+ - **LessOrEqual** uses `<=` to create a less-than-or-equal-to comparison
+ - **NotEqual** uses `<>` to create a not-equal comparison
+ - **Between** uses `BETWEEN` to create a range comparison
+ - **In** uses `IN` to create an equality comparison within a set of given values
+ - **InArray** uses `?|` in PostgreSQL, and a combination of `EXISTS` / `json_each` / `IN` in SQLite, to create an equality comparison within a given set of values against an array in a JSON document
+ - **Exists** uses `IS NOT NULL` to create an existence comparison
+ - **NotExists** uses `IS NULL` to create a non-existence comparison; fields are considered null if they are either not part of the document, or if they are part of the document but explicitly set to `null`
+- **WhereById** takes a parameter name and generates a field `Equal` comparison against the configured ID field.
+- **Patch** and **RemoveFields** use each implementation's unique syntax for partial updates and field removals.
+- **ByFields**, **ByContains** (PostgreSQL), and **ByJsonPath** (PostgreSQL) are functions that take a statement and the criteria, and construct a query to fit that criteria. For `ByFields`, each field parameter will use its specified name if provided (an incrementing `field[n]` if not). `ByContains` uses `@criteria` as its parameter name, which can be any object. `ByJsonPath` uses `@path`, which should be a `string`.
+
+That's a lot of reading! Some examples a bit below will help this make sense.
+
+### Parameters
+
+Traditional ADO.NET data access involves creating a connection object, then adding parameters to that object. This library follows a more declarative style, where parameters are passed via `IEnumerable` collections. To assist with creating these collections, each implementation has some helper functions. For C#, these calls will need to be prefixed with `Parameters`; for F#, this module is auto-opened. This is one area where names differ in other than just casing, so both will be listed.
+
+- **Parameters.Id** / **idParam** generate an `@id` parameter with the numeric, `string`, or `ToString()`ed value of the ID passed.
+- **Parameters.Json** / **jsonParam** generate a user-provided-named JSON-formatted parameter for the value passed (this can be used for PostgreSQL's JSON containment queries as well)
+- **Parameters.AddFields** / **addFieldParams** append field parameters to the given parameter list
+- **Parameters.FieldNames** / **fieldNameParams** create parameters for the list of field names to be removed; for PostgreSQL, this returns a single parameter, while SQLite returns a list of parameters
+- **Parameters.None** / **noParams** is an empty set of parameters, and can be cleaner and convey intent better than something like `new[] { }` _(For C# 12 or later, the collection expression `[]` is much terser.)_
+
+If you need a parameter beyond these, both `NpgsqlParameter` and `SqliteParameter` have a name-and-value constructor; that isn't many more keystrokes.
+
+### Results
+
+The `Results` module is implementation specific. Both libraries provide `Results.FromData`, which deserializes a `data` column into the requested type; and `FromDocument`, which does the same thing, but allows the column to be named as well. We'll see how we can use these in further examples. As with parameters, C# users need to qualify the class name, but the module is auto-opened for F#.
+
+## Putting It All Together
+
+The **Custom** static class/module has seven methods/functions:
+
+- **List** requires a query, parameters, and a mapping function, and returns a list of documents.
+- **JsonArray** is the same as `List`, but returns the documents as `string` in a JSON array.
+- **WriteJsonArray** writes documents to a `PipeWriter` as they are read from the database; the result is the same as `JsonArray`, but no unified string is constructed.
+- **Single** requires a query, parameters, and a mapping function, and returns one or no documents (C# `TDoc?`, F# `'TDoc option`)
+- **JsonSingle** is the same as `Single`, but returns a JSON `string` instead (returning `{}` if no document is found).
+- **Scalar** requires a query, parameters, and a mapping function, and returns a scalar value (non-nullable; used for counts, existence, etc.)
+- **NonQuery** requires a query and parameters and has no return value
+
+> _Within each library, every other call is written in terms of these functions; your custom queries will use the same code the provided ones do!_
+
+Let's jump in with an example. When we query for a room, let's say that we also want to retrieve its hotel information as well. We saw the query above, but here is how we can implement it using a custom query.
+
+```csharp
+// C#, All
+// return type is Tuple<Room, Hotel>?
+var data = await Custom.Single(
+ $"SELECT r.data, h.data AS hotel_data
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId'
+ WHERE r.{Query.WhereById("@id")}",
+ new[] { Parameters.Id("my-room-key") },
+ // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite
+    rdr => Tuple.Create(Results.FromData<Room>(rdr), Results.FromDocument<Hotel>("hotel_data", rdr)));
+if (data is not null)
+{
+ var (room, hotel) = data;
+ // do stuff with the room and hotel data
+}
+```
+
+```fsharp
+// F#, All
+// return type is (Room * Hotel) option
+let! data =
+ Custom.single
+ $"""SELECT r.data, h.data AS hotel_data
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId'
+ WHERE r.{Query.whereById "@id"}"""
+ [ idParam "my-room-key" ]
+ // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite
+ fun rdr -> (fromData rdr), (fromDocument "hotel_data" rdr)
+match data with
+| Some (room, hotel) ->
+ // do stuff with room and hotel
+| None -> ()
+```
+
+These queries use `Configuration.IdField` and `WhereById` to use the configured ID field. Creating custom queries using these building blocks allows us to utilize the configured value without hard-coding it throughout our custom queries. If the configuration changes, these queries will pick up the new field name seamlessly.
+
+While this example retrieves the entire document, this is not required. If we only care about the name of the associated hotel, we could amend the query to retrieve only that information.
+
+```csharp
+// C#, All
+// return type is Tuple<Room, string>?
+var data = await Custom.Single(
+ $"SELECT r.data, h.data ->> 'Name' AS hotel_name
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId'
+ WHERE r.{Query.WhereById("@id")}",
+ new[] { Parameters.Id("my-room-key") },
+ // PostgreSQL
+ row => Tuple.Create(Results.FromData(row), row.string("hotel_name")));
+ // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well
+ // rdr => Tuple.Create(Results.FromData(rdr), rdr.GetString(1)));
+
+if (data is not null)
+{
+ var (room, hotelName) = data;
+ // do stuff with the room and hotel name
+}
+```
+
+```fsharp
+// F#, All
+// return type is (Room * string) option
+let! data =
+ Custom.single
+ $"""SELECT r.data, h.data->>'Name' AS hotel_name
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId'
+ WHERE r.{Query.whereById "@id"}"""
+ [ idParam "my-room-key" ]
+ // PostgreSQL
+ fun row -> (fromData row), row.string "hotel_name"
+ // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well
+ // fun rdr -> (fromData rdr), rdr.GetString(1)
+match data with
+| Some (room, hotelName) ->
+ // do stuff with room and hotel name
+| None -> ()
+```
+
+These queries are amazingly efficient, using 2 unique index lookups to return this data. Even though we do not have a foreign key between these two tables, simply being in a relational database allows us to retrieve this related data.
+
+Revisiting our "take these rooms out of service" SQLite query from the Basic Usage page, here's how that could look using building blocks available since version 4 (PostgreSQL will accept this query syntax as well, though the parameter types would be different):
+
+```csharp
+// C#, SQLite
+Field[] fields = [Field.GreaterOrEqual("RoomNumber", 221), Field.LessOrEqual("RoomNumber", 240)];
+await Custom.NonQuery(
+ Sqlite.Query.ByFields(Sqlite.Query.Patch("room"), FieldMatch.All, fields,
+ new { InService = false }),
+ Parameters.AddFields(fields, []));
+```
+
+```fsharp
+// F#, SQLite
+let fields = [ Field.GreaterOrEqual "RoomNumber" 221; Field.LessOrEqual "RoomNumber" 240 ]
+do! Custom.nonQuery
+ (Query.byFields (Query.patch "room") All fields {| InService = false |})
+        (addFieldParams fields [])
+```
+
+This uses two field comparisons to incorporate the room number range instead of a `BETWEEN` clause; we would definitely want to have that field indexed if this was going to be a regular query or our data was going to grow beyond a trivial size.
+
+_You may be thinking "wait - what's the difference between that and the regular `Patch` call?" And you'd be right; that is exactly what `Patch.ByFields` does. `Between` is also a better comparison for this, and either `FieldMatch` type will work, as we're only passing one field. No building blocks required!_
+
+```csharp
+// C#, All
+await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
+ new { InService = false });
+```
+
+```fsharp
+// F#, All
+do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |}
+```
+
+## Going Even Further
+
+### Updating Data in Place
+
+One drawback to document databases is the inability to update values in place; however, with a bit of creativity, we can do a lot more than we initially think. For a single field, SQLite has a `json_set` function that takes an existing JSON field, a field name, and a value to which it should be set. This allows us to do single-field updates in the database. If we wanted to raise our rates 10% for every room, we could use this query:
+
+```sql
+-- SQLite
+UPDATE room SET data = json_set(data, 'Rate', data->>'Rate' * 1.1)
+```
+
+If we get any more complex, though, Common Table Expressions (CTEs) can help us. Perhaps we decided that we only wanted to raise the rates for hotels in New York, Chicago, and Los Angeles, and we wanted to exclude any brand with the word "Value" in its name. A CTE lets us select the source data we need to craft the update, then use that in the `UPDATE`'s clauses.
+
+```sql
+-- SQLite
+WITH to_update AS
+ (SELECT r.data->>'Id' AS room_id, r.data->>'Rate' AS current_rate, r.data AS room_data
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+ WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
+ AND LOWER(h.data->>'Name') NOT LIKE '%value%')
+UPDATE room
+ SET data = json_set(to_update.room_data, 'Rate', to_update.current_rate * 1.1)
+  FROM to_update WHERE room.data->>'Id' = to_update.room_id
+```
+
+Both PostgreSQL and SQLite provide JSON patching, where multiple fields (or entire structures) can be changed at once. Let's revisit our rate increase; if we are making the rate more than $500, we'll apply a status of "Premium" to the room. If it is less than that, it should keep its same value.
+
+First up, PostgreSQL:
+```sql
+-- PostgreSQL
+WITH to_update AS
+ (SELECT r.data->>'Id' AS room_id, (r.data->>'Rate')::decimal AS rate, r.data->>'Status' AS status
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+ WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
+ AND LOWER(h.data ->> 'Name') NOT LIKE '%value%')
+UPDATE room
+ SET data = data ||
+        ('{"Rate":' || to_update.rate * 1.1 || ',"Status":"'
+ || CASE WHEN to_update.rate * 1.1 > 500 THEN 'Premium' ELSE to_update.status END
+          || '"}')::jsonb
+  FROM to_update WHERE room.data->>'Id' = to_update.room_id
+```
+
+In SQLite:
+```sql
+-- SQLite
+WITH to_update AS
+ (SELECT r.data->>'Id' AS room_id, r.data->>'Rate' AS rate, r.data->>'Status' AS status
+ FROM room r
+ INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+ WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
+ AND LOWER(h.data->>'Name') NOT LIKE '%value%')
+UPDATE room
+ SET data = json_patch(data, json(
+        '{"Rate":' || to_update.rate * 1.1 || ',"Status":"'
+ || CASE WHEN to_update.rate * 1.1 > 500 THEN 'Premium' ELSE to_update.status END
+ || '"}'))
+  FROM to_update WHERE room.data->>'Id' = to_update.room_id
+```
+
+For PostgreSQL, `->>` always returns text, so we need to cast the rate to a number. In either case, we do not want to use this technique for user-provided data; however, in place, it allowed us to complete all of our scenarios without having to load the documents into our application and manipulate them there.
+
+Updates in place may not need parameters (though it would be easy to foresee a "rate adjustment" feature where the 1.1 adjustment was not hard-coded); in fact, none of the samples in this section used the document libraries at all. These queries can be executed by `Custom.NonQuery`, though, providing parameters as required.
+
+### Using This Library for Non-Document Queries
+
+The `Custom` methods/functions can be used with non-document tables as well. This may be a convenient and consistent way to access your data, while delegating connection management to the library and its configured data source.
+
+Let's walk through a short example using C# and PostgreSQL:
+
+```csharp
+// C#, PostgreSQL
+using Npgsql.FSharp; // Needed for RowReader and Sql types
+using static CommonExtensionsAndTypesForNpgsqlFSharp; // Needed for Sql functions
+
+// Stores metadata for a given user
+public class MetaData
+{
+ public string Id { get; set; } = "";
+ public string UserId { get; set; } = "";
+ public string Key { get; set; } = "";
+ public string Value { get; set; } = "";
+}
+
+// Static class to hold mapping functions
+public static class Map
+{
+ // These parameters are the column names from the underlying table
+    public static MetaData ToMetaData(RowReader row) =>
+ new MetaData
+ {
+ Id = row.string("id"),
+ UserId = row.string("user_id"),
+ Key = row.string("key"),
+ Value = row.string("value")
+ };
+}
+
+// somewhere in a class, retrieving data
+public Task<List<MetaData>> MetaDataForUser(string userId) =>
+ Document.Custom.List("SELECT * FROM user_metadata WHERE user_id = @userId",
+        new[] { Tuple.Create("@userId", Sql.@string(userId)) },
+ Map.ToMetaData);
+```
+
+For F#, the `using static` above is not needed; that module is auto-opened when `Npgsql.FSharp` is opened. For SQLite in either language, the mapping function uses a `SqliteDataReader` object, which implements the standard ADO.NET `DataReader` functions of `Get[Type](idx)` (and `GetOrdinal(name)` for the column index).
+
+
+[tnf]: https://en.wikipedia.org/wiki/Third_normal_form "Third Normal Form • Wikipedia"
+[id]: ../getting-started.md#field-name "Getting Started (ID Fields) • BitBadger.Documents"
+[Basic Usage]: ../basic-usage.md "Basic Usage • BitBadger.Documents"
diff --git a/docs/advanced/transactions.md b/docs/advanced/transactions.md
new file mode 100644
index 0000000..703b075
--- /dev/null
+++ b/docs/advanced/transactions.md
@@ -0,0 +1,96 @@
+# Transactions
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+On occasion, there may be a need to perform multiple updates in a single database transaction, where either all updates succeed, or none do.
+
+## Controlling Database Transactions
+
+The `Configuration` static class/module of each library [provides a way to obtain a connection][conn]. Whatever strategy your application uses to obtain the connection, the connection object is how ADO.NET implements transactions.
+
+```csharp
+// C#, All
+// "conn" is assumed to be either NpgsqlConnection or SqliteConnection
+await using var txn = await conn.BeginTransactionAsync();
+try
+{
+ // do stuff
+ await txn.CommitAsync();
+}
+catch (Exception ex)
+{
+ await txn.RollbackAsync();
+ // more error handling
+}
+```
+
+```fsharp
+// F#, All
+// "conn" is assumed to be either NpgsqlConnection or SqliteConnection
+use! txn = conn.BeginTransactionAsync ()
+try
+ // do stuff
+ do! txn.CommitAsync ()
+with ex ->
+    do! txn.RollbackAsync ()
+ // more error handling
+```
+
+## Executing Queries on the Connection
+
+This precise scenario was the reason that all methods and functions are implemented on the connection object; all extensions execute the commands in the context of the connection. Imagine an application where a user signs in. We may want to set an attribute on the user record that says that now is the last time they signed in; and we may also want to reset a failed logon counter, as they have successfully signed in. This would look like:
+
+```csharp
+// C#, All ("conn" is our connection object)
+await using var txn = await conn.BeginTransactionAsync();
+try
+{
+ await conn.PatchById("user_table", userId, new { LastSeen = DateTime.Now });
+ await conn.PatchById("security", userId, new { FailedLogOnCount = 0 });
+ await txn.CommitAsync();
+}
+catch (Exception ex)
+{
+ await txn.RollbackAsync();
+ // more error handling
+}
+```
+
+```fsharp
+// F#, All ("conn" is our connection object)
+use! txn = conn.BeginTransactionAsync()
+try
+ do! conn.patchById "user_table" userId {| LastSeen = DateTime.Now |}
+ do! conn.patchById "security" userId {| FailedLogOnCount = 0 |}
+ do! txn.CommitAsync()
+with ex ->
+ do! txn.RollbackAsync()
+ // more error handling
+```
+
+### A Functional Alternative
+
+The PostgreSQL library has a static class/module called `WithProps`; the SQLite library has a static class/module called `WithConn`. Each of these accept the `SqlProps` or `SqliteConnection` parameter as the last parameter of the query. For SQLite, we need nothing else to pass the connection to these methods/functions; for PostgreSQL, though, we'll need to create a `SqlProps` object based off the connection.
+
+```csharp
+// C#, PostgreSQL
+using Npgsql.FSharp;
+// ...
+var props = Sql.existingConnection(conn);
+// ...
+await WithProps.Patch.ById("user_table", userId, new { LastSeen = DateTime.Now }, props);
+```
+
+```fsharp
+// F#, PostgreSQL
+open Npgsql.FSharp
+// ...
+let props = Sql.existingConnection conn
+// ...
+do! WithProps.Patch.ById "user_table" userId {| LastSeen = DateTime.Now |} props
+```
+
+If we do not want to qualify with `WithProps` or `WithConn`, C# users can add `using static [WithProps|WithConn];` to bring these functions into scope; F# users can add `open BitBadger.Documents.[Postgres|Sqlite].[WithProps|WithConn]` to bring them into scope. However, in C#, this will affect the entire file, and in F#, it will affect the file from that point through the end of the file. Unless you want to go all-in with the connection-last functions, it is probably better to qualify the occasional call.
+
+
+[conn]: ../getting-started.md#the-connection "Getting Started (The Connection) • BitBadger.Documents"
diff --git a/docs/basic-usage.md b/docs/basic-usage.md
new file mode 100644
index 0000000..6cb6caa
--- /dev/null
+++ b/docs/basic-usage.md
@@ -0,0 +1,149 @@
+# Basic Usage
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+## Overview
+
+There are several categories of operations that can be accomplished against documents.
+
+- **Count** returns the number of documents matching some criteria
+- **Exists** returns true if any documents match the given criteria
+- **Insert** adds a new document, failing if the ID field is not unique
+- **Save** adds a new document, updating an existing one if the ID is already present ("upsert")
+- **Update** updates an existing document, doing nothing if no documents satisfy the criteria
+- **Patch** updates a portion of an existing document, doing nothing if no documents satisfy the criteria
+- **Find** returns the documents matching some criteria as domain objects
+- **Json** returns or writes documents matching some criteria as JSON text
+- **RemoveFields** removes fields from documents matching some criteria
+- **Delete** removes documents matching some criteria
+
+`Insert` and `Save` are the only two that do not mention criteria. For the others, "some criteria" can be defined a few different ways:
+- **All** references all documents in the table; applies to Count and Find
+- **ById** looks for a single document on which to operate; applies to all but Count
+- **ByFields** uses JSON field comparisons to select documents for further processing (PostgreSQL will use a numeric comparison if the field value is numeric, or a string comparison otherwise; SQLite will do its usual [best-guess on types][]{target=_blank rel=noopener}); applies to all but Update
+- **ByContains** (PostgreSQL only) uses a JSON containment query (the `@>` operator) to find documents where the given sub-document occurs (think of this as an `=` comparison based on one or more properties in the document; looking for hotels with `{ "Country": "USA", "Rating": 4 }` would find all hotels with a rating of 4 in the United States); applies to all but Update
+- **ByJsonPath** (PostgreSQL only) uses a JSON Path match query (the `@?` operator) to make specific queries against a document's structure (it also supports more operators than a containment query; to find all hotels rated 4 _or higher_ in the United States, we could query for `"$ ? (@.Country == \"USA\" && @.Rating >= 4)"`); applies to all but Update
+
+Finally, `Find` and `Json` also have `FirstBy*` implementations for all supported criteria types, and `Find*Ordered` implementations to sort the results in the database.
+
+## Saving Documents
+
+The library provides three different ways to save data. The first equates to a SQL `INSERT` statement, and adds a single document to the repository.
+
+```csharp
+// C#, All
+var room = new Room(/* ... */);
+// Parameters are table name and document
+await Document.Insert("room", room);
+```
+
+```fsharp
+// F#, All
+let room = { Room.empty with (* ... *) }
+do! insert "room" room
+```
+
+The second is `Save`; it inserts the data if it does not exist and replaces the document if it does exist (what some call an "upsert"). It utilizes the `ON CONFLICT` syntax to ensure an atomic statement. Its parameters are the same as those for `Insert`.
+
+The third equates to a SQL `UPDATE` statement. `Update` applies to a full document and is usually used by ID, while `Patch` is used for partial updates and may be done by field comparison, JSON containment, or JSON Path match. For a few examples, let's begin with a query that may back the "edit hotel" page. This page lets the user update nearly all the details for the hotel, so updating the entire document would be appropriate.
+
+```csharp
+// C#, All
+var hotel = await Document.Find.ById("hotel", hotelId);
+if (!(hotel is null))
+{
+ // update hotel properties from the posted form
+ await Update.ById("hotel", hotel.Id, hotel);
+}
+```
+
+```fsharp
+// F#, All
+match! Find.byId "hotel" hotelId with
+| Some hotel ->
+    let updated = { hotel with (* properties from posted form *) }
+    do! Update.byId "hotel" hotel.Id updated
+| None -> ()
+```
+
+For the next example, suppose we are upgrading our hotel, and need to take rooms 221-240 out of service*. We can utilize a patch via JSON Path** to accomplish this.
+
+```csharp
+// C#, PostgreSQL
+await Patch.ByJsonPath("room",
+    "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))",
+ new { InService = false });
+```
+
+```fsharp
+// F#, PostgreSQL
+do! Patch.byJsonPath "room"
+    "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))"
+    {| InService = false |}
+```
+
+_* - we are ignoring the current reservations, end date, etc. This is a very naïve example!_
+
+\** - Both PostgreSQL and SQLite can also accomplish this using the `Between` comparison and a `ByFields` query:
+
+```csharp
+// C#, Both
+await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
+ new { InService = false });
+```
+
+```fsharp
+// F#, Both
+do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |}
+```
+
+This could also be done with `All`/`FieldMatch.All` and `GreaterOrEqual` and `LessOrEqual` field comparisons, or even a custom query; these are fully explained in the [Advanced Usage][] section.
+
+> There is an `Update.ByFunc` variant that takes an ID extraction function run against the document instead of its ID. This is detailed in the [Advanced Usage][] section.
+
+## Finding Documents as Domain Items
+
+Functions to find documents start with `Find.`. There are variants to find all documents in a table, find by ID, find by JSON field comparisons, find by JSON containment, or find by JSON Path. The hotel update example above utilizes an ID lookup; the descriptions of JSON containment and JSON Path show examples of the criteria used to retrieve using those techniques.
+
+`Find` methods and functions are generic; specifying the return type is crucial. Additionally, `ById` will need the type of the key being passed. In C#, `ById` and the `FirstBy*` methods will return `TDoc?`, with the value if it was found or `null` if it was not; `All` and other `By*` methods return `List` (from `System.Collections.Generic`). In F#, `byId` and the `firstBy*` functions will return `'TDoc option`; `all` and other `by*` functions return `'TDoc list`.
+
+`Find*Ordered` methods and functions append an `ORDER BY` clause to the query that will sort the results in the database. These take, as their last parameter, a sequence of `Field` items; a `.Named` method allows for field creation for these names. Within these names, prefixing the name with `n:` will tell PostgreSQL to sort this field numerically rather than alphabetically; it has no effect in SQLite (it does its own [type coercion][best-guess on types]). Adding " DESC" at the end will sort high-to-low instead of low-to-high.
+
+## Finding Documents as JSON
+
+All `Find` methods and functions have two corresponding `Json` functions.
+
+* The first set return the expected document(s) as a `string`, and will always return valid JSON. Single-document queries with nothing found will return `{}`, while zero-to-many queries will return `[]` if no documents match the given criteria.
+* The second set are prefixed with `Write`, and take a `PipeWriter` immediately after the table name parameter. These functions write results to the given pipeline as they are retrieved from the database, instead of accumulating them all and returning a `string`. This can be useful for JSON API scenarios; ASP.NET Core's `HttpResponse.BodyWriter` property is a `PipeWriter` (and pipelines are [preferred over streams][pipes]).
+
+## Deleting Documents
+
+Functions to delete documents start with `Delete.`. Document deletion is supported by ID, JSON field comparison, JSON containment, or JSON Path match. The pattern is the same as for finding or partially updating. _(There is no library method provided to delete all documents, though deleting by JSON field comparison where a non-existent field is null would accomplish this.)_
+
+## Counting Documents
+
+Functions to count documents start with `Count.`. Documents may be counted by a table in its entirety, by JSON field comparison, by JSON containment, or by JSON Path match. _(Counting by ID is an existence check!)_
+
+## Document Existence
+
+Functions to check for existence start with `Exists.`. Documents may be checked for existence by ID, JSON field comparison, JSON containment, or JSON Path match.
+
+## What / How Cross-Reference
+
+The table below shows which commands are available for each access method. (X = supported for both, P = PostgreSQL only)
+
+| Operation | `All` | `ById` | `ByFields` | `ByContains` | `ByJsonPath` | `FirstByFields` | `FirstByContains` | `FirstByJsonPath` |
+|-----------------|:-----:|:------:|:----------:|:------------:|:------------:|:---------------:|:-----------------:|:-----------------:|
+| `Count` | X | | X | P | P | | | |
+| `Exists` | | X | X | P | P | | | |
+| `Find` / `Json` | X | X | X | P | P | X | P | P |
+| `Patch` | | X | X | P | P | | | |
+| `RemoveFields` | | X | X | P | P | | | |
+| `Delete` | | X | X | P | P | | | |
+
+`Insert`, `Save`, and `Update.*` operate on single documents.
+
+[best-guess on types]: https://sqlite.org/datatype3.html "Datatypes in SQLite • SQLite"
+[JSON Path]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-SQLJSON-PATH "JSON Functions and Operators • PostgreSQL Documentation"
+[Advanced Usage]: ./advanced/index.md "Advanced Usage • BitBadger.Documents • Bit Badger Solutions"
+[pipes]: https://learn.microsoft.com/en-us/aspnet/core/fundamentals/middleware/request-response?view=aspnetcore-9.0 "Request and Response Operations • Microsoft Learn"
diff --git a/docs/getting-started.md b/docs/getting-started.md
new file mode 100644
index 0000000..3767ac0
--- /dev/null
+++ b/docs/getting-started.md
@@ -0,0 +1,187 @@
+# Getting Started
+## Overview
+
+Each library has three different ways to execute commands:
+- Functions/methods that have no connection parameter at all; for these, each call obtains a new connection. _(Connection pooling greatly reduces this overhead and churn on the database)_
+- Functions/methods that take a connection as the last parameter; these use the given connection to execute the commands.
+- Extensions on the `NpgsqlConnection` or `SqliteConnection` type (native for both C# and F#); these are the same as the prior ones, and the names follow a similar pattern (ex. `Count.All()` is exposed as `conn.CountAll()`).
+
+This provides flexibility in how connections are managed. If your application does not care about it, configuring the library is all that is required. If your application generally does not care, but needs a connection on occasion, one can be obtained from the library and used as required. If you are developing a web application, and want to use one connection per request, you can register the library's connection functions as a factory, and have that connection injected. We will cover the how-to below for each scenario, but it is worth considering before getting started.
+
+> A note on functions: the F# functions use `camelCase`, while C# calls use `PascalCase`. To cut down on the noise, this documentation will generally use the C# `Count.All` form; know that this is `Count.all` for F#, `conn.CountAll()` for the C# extension method, and `conn.countAll` for the F# extension.
+
+## Namespaces
+
+### C#
+
+```csharp
+using BitBadger.Documents;
+using BitBadger.Documents.[Postgres|Sqlite];
+```
+
+### F#
+
+```fsharp
+open BitBadger.Documents
+open BitBadger.Documents.[Postgres|Sqlite]
+```
+
+For F#, this order is significant; both namespaces have modules that share names, and this order will control which one shadows the other.
+
+## Configuring the Connection
+
+### The Connection String
+
+Both PostgreSQL and SQLite use the standard ADO.NET connection string format ([`Npgsql` docs][], [`Microsoft.Data.Sqlite` docs][]). The usual location for these is an `appsettings.json` file, which is then parsed into an `IConfiguration` instance. For SQLite, all the library needs is a connection string:
+
+```csharp
+// C#, SQLite
+// ...
+var config = ...; // parsed IConfiguration
+Sqlite.Configuration.UseConnectionString(config.GetConnectionString("SQLite"));
+// ...
+```
+
+```fsharp
+// F#, SQLite
+// ...
+let config = ...; // parsed IConfiguration
+Configuration.useConnectionString (config.GetConnectionString("SQLite"))
+// ...
+```
+
+For PostgreSQL, the library needs an `NpgsqlDataSource` instead. There is a builder that takes a connection string and creates it, so it still is not a lot of code: _(although this implements `IDisposable`, do not declare it with `using` or `use`; the library handles disposal if required)_
+
+```csharp
+// C#, PostgreSQL
+// ...
+var config = ...; // parsed IConfiguration
+var dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build();
+Postgres.Configuration.UseDataSource(dataSource);
+// ...
+```
+
+```fsharp
+// F#, PostgreSQL
+// ...
+let config = ...; // parsed IConfiguration
+let dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build()
+Configuration.useDataSource dataSource
+// ...
+```
+
+### The Connection
+
+- If the application does not care to control the connection, use the methods/functions that do not require one.
+- To retrieve an occasional connection (possibly to do multiple updates in a transaction), the `Configuration` static class/module for each implementation has a way. (For both of these, define the result with `using` or `use` so that they are disposed properly.)
+ - For PostgreSQL, the `DataSource()` method returns the configured `NpgsqlDataSource` instance; from this, `OpenConnection[Async]()` can be used to obtain a connection.
+ - For SQLite, the `DbConn()` method returns a new, open `SqliteConnection`.
+- To use a connection per request in a web application scenario, register it with DI.
+
+```csharp
+// C#, PostgreSQL
+builder.Services.AddScoped(svcProvider =>
+ Postgres.Configuration.DataSource().OpenConnection());
+// C#, SQLite
+builder.Services.AddScoped(svcProvider => Sqlite.Configuration.DbConn());
+```
+
+```fsharp
+// F#, PostgreSQL
+let _ = builder.Services.AddScoped(fun sp -> (Configuration.dataSource ()).OpenConnection())
+// F#, SQLite
+let _ = builder.Services.AddScoped(fun sp -> Configuration.dbConn ())
+```
+
+After registering, this connection will be available on the request context and can be injected in the constructor for things like Razor Pages or MVC Controllers.
+
+## Configuring Document IDs
+
+### Field Name
+
+A common .NET pattern when naming unique identifiers for entities / documents / etc. is the name `Id`. By default, this library assumes that this field is the identifier for your documents. If your code follows this pattern, you will be happy with the default behavior. If you use a different property, or [implement a custom serializer][ser] to modify the JSON representation of your documents' IDs, though, you will need to configure that field name before you begin calling other functions or methods. A great spot for this is just after you configure the connection string or data source (above). If you have decided that the field "Name" is the unique identifier for your documents, your setup would look something like...
+
+```csharp
+// C#, All
+Configuration.UseIdField("Name");
+```
+
+```fsharp
+// F#, All
+Configuration.useIdField "Name"
+```
+
+Setting this will make `EnsureTable` create the unique index on that field when it creates a table, and will make all the `ById` functions and methods look for `data->>'Name'` instead of `data->>'Id'`. JSON is case-sensitive, so if the JSON is camel-cased, this should be configured to be `id` instead of `Id` (or `name` to follow the example above).
+
+### Generation Strategy
+
+The library can also generate IDs if they are missing. There are three different types of IDs, and each case of the `AutoId` enumeration/discriminated union can be passed to `Configuration.UseAutoIdStrategy()` to configure the library.
+
+- `Number` generates a "max ID plus 1" query based on the current values of the table.
+- `Guid` generates a 32-character string from a Globally Unique Identifier (GUID), lowercase with no dashes.
+- `RandomString` generates random bytes and converts them to a lowercase hexadecimal string. By default, the string is 16 characters, but can be changed via `Configuration.UseIdStringLength()`. _(You can also use `AutoId.GenerateRandomString(length)` to generate these strings for other purposes; they make good salts, transient keys, etc.)_
+
+All of these are off by default (the `Disabled` case). Even when ID generation is configured, though, only IDs of 0 (for `Number`) or empty strings (for `Guid` and `RandomString`) will be generated. IDs are only generated on `Insert`.
+
+> Numeric IDs are a one-time decision. In PostgreSQL, once a document has a non-numeric ID, attempts to insert an automatic number will fail. One could switch from numbers to strings, and the IDs would be treated as such (`"33"` instead of `33`, for example). SQLite does a best-guess typing of columns, but once a string ID is there, the "max + 1" algorithm will not return the expected results.
+
+## Ensuring Tables and Indexes Exist
+
+Both PostgreSQL and SQLite store data in tables and can utilize indexes to retrieve that data efficiently. Each application will need to determine the tables and indexes it expects.
+
+To discover these concepts, let's consider a naive example of a hotel chain; they have several hotels, and each hotel has several rooms. While each hotel could have its rooms as part of a `Hotel` document, there would likely be a lot of contention with concurrent updates for rooms, so we will put rooms in their own table. The hotel will store attributes like name, address, etc.; while each room will have the hotel's ID (named `HotelId`), along with things like room number, floor, and a list of date ranges where the room is not available. (This could be for customer reservation, maintenance, etc.)
+
+_(Note that all "ensure" methods/functions below use the `IF NOT EXISTS` clause; they are safe to run each time the application starts up, and will do nothing if the tables or indexes already exist.)_
+
+### PostgreSQL
+
+We have a few options when it comes to indexing our documents. We can index a specific JSON field; each table's primary key is implemented as a unique index on the configured ID field. We can also use a GIN index to index the entire document, and that index can even be [optimized for a subset of JSON Path operators][json-index].
+
+Let's create a general-purpose index on hotels, a "HotelId" index on rooms, and an optimized document index on rooms.
+
+```csharp
+// C#, PostgreSQL
+await Definition.EnsureTable("hotel");
+await Definition.EnsureDocumentIndex("hotel", DocumentIndex.Full);
+await Definition.EnsureTable("room");
+// parameters are table name, index name, and fields to be indexed
+await Definition.EnsureFieldIndex("room", "hotel_id", ["HotelId"]);
+await Definition.EnsureDocumentIndex("room", DocumentIndex.Optimized);
+```
+
+```fsharp
+// F#, PostgreSQL
+do! Definition.ensureTable "hotel"
+do! Definition.ensureDocumentIndex "hotel" Full
+do! Definition.ensureTable "room"
+do! Definition.ensureFieldIndex "room" "hotel_id" [ "HotelId" ]
+do! Definition.ensureDocumentIndex "room" Optimized
+```
+
+### SQLite
+
+For SQLite, the only option for JSON indexes (outside some quite complex techniques) is an index on fields. Just as with traditional relational indexes, these fields can be specified in expected query order. In our example, if we indexed our rooms on hotel ID and room number, it could also be used for efficient retrieval just by hotel ID.
+
+Let's create hotel and room tables, then index rooms by hotel ID and room number.
+
+```csharp
+// C#, SQLite
+await Definition.EnsureTable("hotel");
+await Definition.EnsureTable("room");
+await Definition.EnsureIndex("room", "hotel_and_nbr", ["HotelId", "RoomNumber"]);
+```
+
+```fsharp
+// F#, SQLite
+do! Definition.ensureTable "hotel"
+do! Definition.ensureTable "room"
+do! Definition.ensureIndex "room" "hotel_and_nbr" [ "HotelId"; "RoomNumber" ]
+```
+
+Now that we have tables, let's [use them][]!
+
+[`Npgsql` docs]: https://www.npgsql.org/doc/connection-string-parameters "Connection String Parameters • Npgsql"
+[`Microsoft.Data.Sqlite` docs]: https://learn.microsoft.com/en-us/dotnet/standard/data/sqlite/connection-strings "Connection Strings • Microsoft.Data.Sqlite • Microsoft Learn"
+[ser]: ./advanced/custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents"
+[json-index]: https://www.postgresql.org/docs/current/datatype-json.html#JSON-INDEXING "Indexing JSON Fields • PostgreSQL"
+[use them]: ./basic-usage.md "Basic Usage • BitBadger.Documents"
diff --git a/docs/toc.yml b/docs/toc.yml
new file mode 100644
index 0000000..941f98c
--- /dev/null
+++ b/docs/toc.yml
@@ -0,0 +1,21 @@
+- name: Getting Started
+ href: getting-started.md
+- name: Basic Usage
+ href: basic-usage.md
+- name: Advanced Usage
+ href: advanced/index.md
+ items:
+ - name: Custom Serialization
+ href: advanced/custom-serialization.md
+ - name: Related Documents and Custom Queries
+ href: advanced/related.md
+ - name: Transactions
+ href: advanced/transactions.md
+- name: Upgrading
+ items:
+ - name: v3 to v4
+ href: upgrade/v4.md
+ - name: v2 to v3
+ href: upgrade/v3.md
+ - name: v1 to v2
+ href: upgrade/v2.md
diff --git a/docs/upgrade/v2.md b/docs/upgrade/v2.md
new file mode 100644
index 0000000..fcf8acf
--- /dev/null
+++ b/docs/upgrade/v2.md
@@ -0,0 +1,37 @@
+# Migrating from v1 to v2
+
+_NOTE: This was an upgrade for the `BitBadger.Npgsql.Documents` library, which this library replaced as of v3._
+
+## Why
+
+In version 1 of this library, the document tables used by this library had two columns: `id` and `data`. `id` served as the primary key, and `data` was the `JSONB` column for the document. Since its release, the author learned that a field in a `JSONB` column could have a unique index that would then serve the role of a primary key.
+
+Version 2 of this library implements this change, both in table setup and in how it constructs queries that occur by a document's ID.
+
+## How
+
+On the [GitHub release page][], there is a MigrateToV2 utility program - one for Windows, and one for Linux. Download and extract the single file in the archive; it requires no installation. It uses an environment variable for the connection string, and takes a table name and an ID column field via the command line.
+
+A quick example under Linux/bash (assuming the ID field in the JSON document is named `Id`)...
+```
+export PGDOC_CONN_STR="Host=localhost;Port=5432;User ID=example_user;Password=example_pw;Database=my_docs"
+./MigrateToV2 ex.doc_table
+./MigrateToV2 ex.another_one
+```
+
+If the ID field has a different name, it can be passed as a second parameter. The utility will display the table name and ID field and ask for confirmation; if you are scripting it, you can set the environment variable `PGDOC_I_KNOW_WHAT_I_AM_DOING` to `true`, and it will bypass this confirmation. Note that the utility itself is quite basic; you are responsible for giving it sane input. If you have customized the tables or the JSON serializer, though, keep reading.
+
+## What
+
+If you have extended the original tables, you may need to handle this migration within either PostgreSQL/psql or your code. The process entails two steps. First, create a unique index on the ID field; in this example, we'll use `name` for the example ID field. Then, drop the `id` column. The below SQL will accomplish this for the fictional `my_table` table.
+
+```sql
+CREATE UNIQUE INDEX idx_my_table_key ON my_table ((data ->> 'name'));
+ALTER TABLE my_table DROP COLUMN id;
+```
+
+If the ID field is different, you will also need to tell the library that. Use `Configuration.UseIdField("name")` (C#) / `Configuration.useIdField "name"` (F#) to specify the name. This will need to be done before queries are executed, as the library uses this field for ID queries. See the [Setting Up instructions][setup] for details on this new configuration parameter.
+
+
+[GitHub release page]: https://github.com/bit-badger/BitBadger.Npgsql.Documents
+[setup]: ../getting-started.md#configuring-document-ids "Getting Started • BitBadger.Documents"
diff --git a/docs/upgrade/v3.md b/docs/upgrade/v3.md
new file mode 100644
index 0000000..8622ebd
--- /dev/null
+++ b/docs/upgrade/v3.md
@@ -0,0 +1,11 @@
+# Upgrade from v2 to v3
+
+The biggest change with this release is that `BitBadger.Npgsql.Documents` became `BitBadger.Documents`, a set of libraries providing the same API over both PostgreSQL and SQLite (provided the underlying database supports it). Existing PostgreSQL users should have a smooth transition.
+
+* Drop `Npgsql` from namespace (`BitBadger.Npgsql.Documents` becomes `BitBadger.Documents`)
+* Add implementation (PostgreSQL namespace is `BitBadger.Documents.Postgres`, SQLite is `BitBadger.Documents.Sqlite`)
+* Both C# and F# idiomatic functions will be visible when those namespaces are `import`ed or `open`ed
+* There is a `Field` constructor for creating field conditions (though look at [v4][]'s changes here as well)
+
+
+[v4]: ./v4.md#op-type-removal "Upgrade from v3 to v4 • BitBadger.Documents"
diff --git a/docs/upgrade/v4.md b/docs/upgrade/v4.md
new file mode 100644
index 0000000..ef0660d
--- /dev/null
+++ b/docs/upgrade/v4.md
@@ -0,0 +1,35 @@
+# Upgrade from v3 to v4
+
+## The Quick Version
+
+- Add `BitBadger.Documents.[Postgres|Sqlite].Compat` to your list of `using` (C#) or `open` (F#) statements. This namespace has deprecated versions of the methods/functions that were removed in v4. These generate warnings, rather than the "I don't know what this is" compiler errors.
+- If your code referenced `Query.[Action].[ById|ByField|etc]`, the sides of the query on each side of the `WHERE` clause are now separate. A query to patch a document by its ID would go from `Query.Patch.ById(tableName)` to `Query.ById(Query.Patch(tableName))`. These functions may also require more parameters; keep reading for details on that.
+- Custom queries had to be used when querying more than one field, or when the results in the database needed to be ordered. v4 provides solutions for both of these within the library itself.
+
+## `ByField` to `ByFields` and PostgreSQL Numbers
+
+All methods/functions that ended with `ByField` now end with `ByFields`, and take a `FieldMatch` case (`Any` equates to `OR`, `All` equates to `AND`) and sequence of `Field` objects. These `Field`s need to have their values as well, because the PostgreSQL library will now cast the field from the document to numeric and bind the parameter as-is.
+
+That is an action-packed paragraph; these changes have several ripple effects throughout the library:
+- Queries like `Query.Find.ByField` would need the full collection of fields to generate the SQL. Instead, `Query.ByFields` takes a "first-half" statement as its first parameter, then the field match and parameters as its next two.
+- `Field` instances in version 3 needed to have a parameter name, which was specified externally to the object itself. In version 4, `ParameterName` is an optional member of the `Field` object, and the library will generate parameter names if it is missing. In both C# and F#, the `.WithParameterName(string)` method can be chained to the `Field.[OP]` call to specify a name, and F# users can also use the language's `with` keyword (`{ Field.EQ "TheField" "value" with ParameterName = Some "@theField" }`).
+
+## `Op` Type Removal
+
+The `Op` type has been replaced with a `Comparison` type which captures both the type of comparison and the object of the comparison in one type. This is considered an internal implementation detail, as that type was not intended for use outside the library; however, it was `public`, so its removal warrants at least a mention.
+
+Additionally, the addition of `In` and `InArray` field comparisons drove a change to the `Field` type's static creation functions. These now have the comparison spelled out, as opposed to the two-to-three character abbreviations. (These abbreviated functions still exist as aliases, so this change will not result in compile errors.) The functions to create fields are:
+
+| Old | New |
+|:-----:|-----------------------|
+| `EQ` | `Equal` |
+| `GT` | `Greater` |
+| `GE` | `GreaterOrEqual` |
+| `LT` | `Less` |
+| `LE` | `LessOrEqual` |
+| `NE` | `NotEqual` |
+| `BT` | `Between` |
+| `IN` | `In` _(since v4 rc1)_ |
+| -- | `InArray` _(v4 rc4)_ |
+| `EX` | `Exists` |
+| `NEX` | `NotExists` |
diff --git a/favicon.ico b/favicon.ico
new file mode 100644
index 0000000..22ca446
Binary files /dev/null and b/favicon.ico differ
diff --git a/index.md b/index.md
new file mode 100644
index 0000000..0f7bc1c
--- /dev/null
+++ b/index.md
@@ -0,0 +1,93 @@
+---
+_layout: landing
+title: Welcome!
+---
+
+BitBadger.Documents provides a lightweight document-style interface over [PostgreSQL][]'s and [SQLite][]'s JSON storage capabilities, with first-class support for both C# and F# programs. _(It is developed by the community; it is not officially affiliated with either project.)_
+
+> [!TIP]
+> Expecting `BitBadger.Npgsql.Documents`? This library replaced it as of v3.
+
+## Installing
+
+### PostgreSQL [![Nuget (with prereleases)][pkg-shield-pgsql]][pkg-link-pgsql]
+
+```shell
+dotnet add package BitBadger.Documents.Postgres
+```
+
+### SQLite [![Nuget (with prereleases)][pkg-shield-sqlite]][pkg-link-sqlite]
+
+```shell
+dotnet add package BitBadger.Documents.Sqlite
+```
+
+## Using
+
+- **[Getting Started][]** provides an overview of the libraries' functions, how to provide connection details, and how to ensure required tables and indexes exist.
+- **[Basic Usage][]** details document-level retrieval, persistence, and deletion.
+- **[Advanced Usage][]** demonstrates how to use the building blocks provided by this library to write slightly more complex queries.
+
+## Upgrading Major Versions
+
+* [v3 to v4][v3v4] ([Release][v4rel]) - Multiple field queries, ordering support, and automatic IDs
+* [v2 to v3][v2v3] ([Release][v3rel]; upgrade from `BitBadger.Npgsql.Documents`) - Namespace / project change
+* [v1 to v2][v1v2] ([Release][v2rel]) - Data storage format change
+
+## Why Documents?
+
+Document databases usually store JSON objects (as their "documents") to provide schemaless persistence of data; they also provide fault-tolerant ways to query that possibly-unstructured data. [MongoDB][] was the pioneer and is the leader in this space, but there are several who provide their own take on it, and their own programming API to come along with it. They also usually have some sort of clustering, replication, and sharding solution that allows them to be scaled out (horizontally) to handle a large amount of traffic.
+
+As a mature relational database, PostgreSQL has a long history of robust data access from the .NET environment; Npgsql is actively developed, and provides both ADO.NET and EF Core APIs. PostgreSQL also has well-established, battle-tested horizontal scaling options. Additionally, the [Npgsql.FSharp][] project provides a functional API over Npgsql's ADO.NET data access. These three factors make PostgreSQL an excellent choice for document storage, and its relational nature can help in areas where traditional document databases become more complex.
+
+SQLite is another mature relational database implemented as a single file, with its access run in-process with the calling application. It works very nicely on its own, with caching and write-ahead logging options; a companion project called [Litestream][] allows these files to be continuously streamed elsewhere, providing point-in-time recovery capabilities one would expect from a relational database. Microsoft provides ADO.NET (and EF Core) drivers for SQLite as part of .NET. These combine to make SQLite a compelling choice, and the hybrid relational/document model allows users to select the model of data that fits their needs best.
+
+In both cases, the document access functions provided by this library are dead-simple. For more complex queries, it also provides the building blocks to construct these with minimal code.
+
+## Why Not [something else]?
+
+We are blessed to live in a time where there are a lot of good data storage options that are more than efficient enough for the majority of use cases. Rather than speaking ill of other projects, here is the vision of the benefits these libraries aim to provide:
+
+### PostgreSQL
+
+PostgreSQL is the most popular non-WordPress database for good reason.
+
+- **Quality** - PostgreSQL's reputation is one of a rock-solid, well-maintained, and continually evolving database.
+- **Availability** - Nearly every cloud database provider offers PostgreSQL, and for custom servers, it is a package install away from being up and running.
+- **Efficiency** - PostgreSQL is very efficient, and its indexing of JSONB allows for quick access via any field in a document.
+- **Maintainability** - The terms "separation of concerns" and "locality of behavior" often compete within a code base, and separation of concerns often wins out; cluttering your logic with SQL can be less than optimal. Using this library, though, it may separate the concerns enough that the calls can be placed directly in the regular logic, providing one fewer place that must be looked up when tracing through the code.
+- **Simplicity** - SQL is a familiar language; even when writing manual queries against the data store created by this library, everything one knows about SQL applies, with [a few operators added][json-ops].
+- **Reliability** - The library has a full suite of tests against both the C# and F# APIs, [run against every supported PostgreSQL version][tests] to ensure the functionality provided is what is advertised.
+
+### SQLite
+
+The [SQLite "About" page][sqlite-about] has a short description of the project and its strengths. Simplicity, flexibility, and a large install base speak for themselves. A lot of people believe they will need a lot of features offered by server-based relational databases, and live with that complexity even when the project is small. A smarter move may be to build with SQLite; if the need arises for something more, the project is very likely a success!
+
+Many of the benefits listed for PostgreSQL apply here as well, including its test coverage, but SQLite removes the requirement to run it as a server!
+
+## Support
+
+Issues can be filed on the project's GitHub repository.
+
+
+[PostgreSQL]: https://www.postgresql.org/ "PostgreSQL"
+[SQLite]: https://sqlite.org/ "SQLite"
+[pkg-shield-pgsql]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Postgres
+[pkg-link-pgsql]: https://www.nuget.org/packages/BitBadger.Documents.Postgres/ "BitBadger.Documents.Postgres • NuGet"
+[pkg-shield-sqlite]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Sqlite
+[pkg-link-sqlite]: https://www.nuget.org/packages/BitBadger.Documents.Sqlite/ "BitBadger.Documents.Sqlite • NuGet"
+[Getting Started]: ./docs/getting-started.md "Getting Started • BitBadger.Documents"
+[Basic Usage]: ./docs/basic-usage.md "Basic Usage • BitBadger.Documents"
+[Advanced Usage]: ./docs/advanced/index.md "Advanced Usage • BitBadger.Documents"
+[v3v4]: ./docs/upgrade/v4.md "Upgrade from v3 to v4 • BitBadger.Documents"
+[v4rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v4 "Version 4 • Releases • BitBadger.Documents • Bit Badger Solutions Git"
+[v2v3]: ./docs/upgrade/v3.md "Upgrade from v2 to v3 • BitBadger.Documents"
+[v3rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v3 "Version 3 • Releases • BitBadger.Documents • Bit Badger Solutions Git"
+[v1v2]: ./docs/upgrade/v2.md "Upgrade from v1 to v2 • BitBadger.Documents"
+[v2rel]: https://github.com/bit-badger/BitBadger.Npgsql.Documents/releases/tag/v2 "Version 2 • Releases • BitBadger.Npgsql.Documents • GitHub"
+[MongoDB]: https://www.mongodb.com/ "MongoDB"
+[Npgsql.FSharp]: https://zaid-ajaj.github.io/Npgsql.FSharp/#/ "Npgsql.FSharp"
+[Litestream]: https://litestream.io/ "Litestream"
+[sqlite-about]: https://sqlite.org/about.html "About • SQLite"
+[json-ops]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-JSON-OP-TABLE "JSON Functions and Operators • Documentation • PostgreSQL"
+[tests]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases "Releases • BitBadger.Documents • Bit Badger Solutions Git"
diff --git a/src/Common/Library.fs b/src/Common/Library.fs
index 8bae732..78c517a 100644
--- a/src/Common/Library.fs
+++ b/src/Common/Library.fs
@@ -1,44 +1,45 @@
namespace BitBadger.Documents
open System.Security.Cryptography
+open System.Text
/// The types of comparisons available for JSON fields
///
type Comparison =
-
- /// Equals (=)
+
+ /// Equals (=)
| Equal of Value: obj
-
- /// Greater Than (>)
+
+ /// Greater Than (>)
| Greater of Value: obj
-
- /// Greater Than or Equal To (>=)
+
+ /// Greater Than or Equal To (>=)
| GreaterOrEqual of Value: obj
-
- /// Less Than (<)
+
+ /// Less Than (<)
| Less of Value: obj
-
- /// Less Than or Equal To (<=)
- | LessOrEqual of Value: obj
-
- /// Not Equal to (<>)
+
+ /// Less Than or Equal To (<=)
+ | LessOrEqual of Value: obj
+
+ /// Not Equal to (<>)
| NotEqual of Value: obj
-
- /// Between (BETWEEN)
+
+ /// Between (BETWEEN)
| Between of Min: obj * Max: obj
-
- /// In (IN)
+
+ /// In (IN)
| In of Values: obj seq
-
- /// In Array (PostgreSQL: |?, SQLite: EXISTS / json_each / IN)
+
+    /// In Array (PostgreSQL: ?|, SQLite: EXISTS / json_each / IN)
| InArray of Table: string * Values: obj seq
-
- /// Exists (IS NOT NULL)
+
+ /// Exists (IS NOT NULL)
| Exists
-
- /// Does Not Exist (IS NULL)
+
+ /// Does Not Exist (IS NULL)
| NotExists
-
+
/// The operator SQL for this comparison
member this.OpSql =
match this with
@@ -50,7 +51,7 @@ type Comparison =
| NotEqual _ -> "<>"
| Between _ -> "BETWEEN"
| In _ -> "IN"
- | InArray _ -> "?|" // PostgreSQL only; SQL needs a subquery for this
+        | InArray _ -> "?|" // PostgreSQL only; SQLite needs a subquery for this
| Exists -> "IS NOT NULL"
| NotExists -> "IS NULL"
@@ -62,120 +63,120 @@ type Dialect =
| SQLite
-/// The format in which an element of a JSON field should be extracted
+/// The format in which an element of a JSON field should be extracted
[]
type FieldFormat =
-
+
///
- /// Use ->> or #>>; extracts a text (PostgreSQL) or SQL (SQLite) value
+ /// Use ->> or #>>; extracts a text (PostgreSQL) or SQL (SQLite) value
///
| AsSql
-
- /// Use -> or #>; extracts a JSONB (PostgreSQL) or JSON (SQLite) value
+
+ /// Use -> or #>; extracts a JSONB (PostgreSQL) or JSON (SQLite) value
| AsJson
-/// Criteria for a field WHERE clause
+/// Criteria for a field WHERE clause
type Field = {
-
+
/// The name of the field
Name: string
-
+
/// The comparison for the field
Comparison: Comparison
-
+
/// The name of the parameter for this field
ParameterName: string option
-
+
/// The table qualifier for this field
Qualifier: string option
} with
-
+
/// Create a comparison against a field
/// The name of the field against which the comparison should be applied
/// The comparison for the given field
- /// A new Field instance implementing the given comparison
+ /// A new Field instance implementing the given comparison
static member Where name (comparison: Comparison) =
{ Name = name; Comparison = comparison; ParameterName = None; Qualifier = None }
-
- /// Create an equals (=) field criterion
+
+ /// Create an equals (=) field criterion
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member Equal<'T> name (value: 'T) =
Field.Where name (Equal value)
-
- /// Create an equals (=) field criterion (alias)
+
+ /// Create an equals (=) field criterion (alias)
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member EQ<'T> name (value: 'T) = Field.Equal name value
-
- /// Create a greater than (>) field criterion
+
+ /// Create a greater than (>) field criterion
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member Greater<'T> name (value: 'T) =
Field.Where name (Greater value)
-
- /// Create a greater than (>) field criterion (alias)
+
+ /// Create a greater than (>) field criterion (alias)
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member GT<'T> name (value: 'T) = Field.Greater name value
-
- /// Create a greater than or equal to (>=) field criterion
+
+ /// Create a greater than or equal to (>=) field criterion
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member GreaterOrEqual<'T> name (value: 'T) =
Field.Where name (GreaterOrEqual value)
-
- /// Create a greater than or equal to (>=) field criterion (alias)
+
+ /// Create a greater than or equal to (>=) field criterion (alias)
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member GE<'T> name (value: 'T) = Field.GreaterOrEqual name value
-
- /// Create a less than (<) field criterion
+
+ /// Create a less than (<) field criterion
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member Less<'T> name (value: 'T) =
Field.Where name (Less value)
-
- /// Create a less than (<) field criterion (alias)
+
+ /// Create a less than (<) field criterion (alias)
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member LT<'T> name (value: 'T) = Field.Less name value
-
- /// Create a less than or equal to (<=) field criterion
+
+ /// Create a less than or equal to (<=) field criterion
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member LessOrEqual<'T> name (value: 'T) =
Field.Where name (LessOrEqual value)
-
- /// Create a less than or equal to (<=) field criterion (alias)
+
+ /// Create a less than or equal to (<=) field criterion (alias)
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member LE<'T> name (value: 'T) = Field.LessOrEqual name value
-
- /// Create a not equals (<>) field criterion
+
+ /// Create a not equals (<>) field criterion
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member NotEqual<'T> name (value: 'T) =
Field.Where name (NotEqual value)
-
- /// Create a not equals (<>) field criterion (alias)
+
+ /// Create a not equals (<>) field criterion (alias)
/// The name of the field to be compared
/// The value for the comparison
/// A field with the given comparison
static member NE<'T> name (value: 'T) = Field.NotEqual name value
-
+
/// Create a Between field criterion
/// The name of the field to be compared
/// The minimum value for the comparison range
@@ -183,27 +184,27 @@ type Field = {
/// A field with the given comparison
static member Between<'T> name (min: 'T) (max: 'T) =
Field.Where name (Between(min, max))
-
+
/// Create a Between field criterion (alias)
/// The name of the field to be compared
/// The minimum value for the comparison range
/// The maximum value for the comparison range
/// A field with the given comparison
static member BT<'T> name (min: 'T) (max: 'T) = Field.Between name min max
-
+
/// Create an In field criterion
/// The name of the field to be compared
/// The values for the comparison
/// A field with the given comparison
static member In<'T> name (values: 'T seq) =
Field.Where name (In (Seq.map box values))
-
+
/// Create an In field criterion (alias)
/// The name of the field to be compared
/// The values for the comparison
/// A field with the given comparison
static member IN<'T> name (values: 'T seq) = Field.In name values
-
+
/// Create an InArray field criterion
/// The name of the field to be compared
/// The name of the table in which the field's documents are stored
@@ -211,34 +212,34 @@ type Field = {
/// A field with the given comparison
static member InArray<'T> name tableName (values: 'T seq) =
Field.Where name (InArray(tableName, Seq.map box values))
-
- /// Create an exists (IS NOT NULL) field criterion
+
+ /// Create an exists (IS NOT NULL) field criterion
/// The name of the field to be compared
/// A field with the given comparison
static member Exists name =
Field.Where name Exists
-
- /// Create an exists (IS NOT NULL) field criterion (alias)
+
+ /// Create an exists (IS NOT NULL) field criterion (alias)
/// The name of the field to be compared
/// A field with the given comparison
static member EX name = Field.Exists name
-
- /// Create a not exists (IS NULL) field criterion
+
+ /// Create a not exists (IS NULL) field criterion
/// The name of the field to be compared
/// A field with the given comparison
static member NotExists name =
Field.Where name NotExists
-
- /// Create a not exists (IS NULL) field criterion (alias)
+
+ /// Create a not exists (IS NULL) field criterion (alias)
/// The name of the field to be compared
/// A field with the given comparison
static member NEX name = Field.NotExists name
-
- /// Transform a field name (a.b.c) to a path for the given SQL dialect
+
+ /// Transform a field name (a.b.c) to a path for the given SQL dialect
/// The name of the field in dotted format
/// The SQL dialect to use when converting the name to nested path format
/// Whether to reference this path as a JSON value or a SQL value
- /// A string with the path required to address the nested document value
+ /// A string with the path required to address the nested document value
static member NameToPath (name: string) dialect format =
let path =
if name.Contains '.' then
@@ -254,19 +255,19 @@ type Field = {
else
match format with AsJson -> $"->'{name}'" | AsSql -> $"->>'{name}'"
$"data{path}"
-
+
/// Create a field with a given name, but no other properties filled
/// The field name, along with any other qualifications if used in a sorting context
- /// Comparison will be Equal, value will be an empty string
+ /// Comparison will be Equal, value will be an empty string
static member Named name =
Field.Where name (Equal "")
-
+
/// Specify the name of the parameter for this field
- /// The parameter name (including : or @)
+ /// The parameter name (including : or @)
/// A field with the given parameter name specified
member this.WithParameterName name =
{ this with ParameterName = Some name }
-
+
/// Specify a qualifier (alias) for the table from which this field will be referenced
/// The table alias for this field comparison
/// A field with the given qualifier specified
@@ -276,7 +277,7 @@ type Field = {
/// Get the qualified path to the field
/// The SQL dialect to use when converting the name to nested path format
/// Whether to reference this path as a JSON value or a SQL value
- /// A string with the qualified path required to address the nested document value
+ /// A string with the qualified path required to address the nested document value
member this.Path dialect format =
(this.Qualifier |> Option.map (fun q -> $"{q}.") |> Option.defaultValue "")
+ Field.NameToPath this.Name dialect format
@@ -285,13 +286,13 @@ type Field = {
/// How fields should be matched
[]
type FieldMatch =
-
- /// Any field matches (OR)
+
+ /// Any field matches (OR)
| Any
-
- /// All fields match (AND)
+
+ /// All fields match (AND)
| All
-
+
/// The SQL value implementing each matching strategy
override this.ToString() =
match this with Any -> "OR" | All -> "AND"
@@ -299,10 +300,10 @@ type FieldMatch =
/// Derive parameter names (each instance wraps a counter to uniquely name anonymous fields)
type ParameterName() =
-
+
/// The counter for the next field value
let mutable currentIdx = -1
-
+
///
/// Return the specified name for the parameter, or an anonymous parameter name if none is specified
///
@@ -319,30 +320,30 @@ type ParameterName() =
/// Automatically-generated document ID strategies
[]
type AutoId =
-
+
/// No automatic IDs will be generated
| Disabled
-
+
/// Generate a MAX-plus-1 numeric value for documents
| Number
-
- /// Generate a GUID for each document (as a lowercase, no-dashes, 32-character string)
+
+ /// Generate a GUID for each document (as a lowercase, no-dashes, 32-character string)
| Guid
-
+
/// Generate a random string of hexadecimal characters for each document
| RandomString
with
- /// Generate a GUID string
- /// A GUID string
+ /// Generate a GUID string
+ /// A GUID string
static member GenerateGuid() =
System.Guid.NewGuid().ToString "N"
-
+
/// Generate a string of random hexadecimal characters
/// The number of characters to generate
/// A string of the given length with random hexadecimal characters
static member GenerateRandomString(length: int) =
RandomNumberGenerator.GetHexString(length, lowercase = true)
-
+
/// Does the given document need an automatic ID generated?
/// The auto-ID strategy currently in use
/// The document being inserted
@@ -387,26 +388,26 @@ with
/// The required document serialization implementation
type IDocumentSerializer =
-
+
/// Serialize an object to a JSON string
abstract Serialize<'T> : 'T -> string
-
+
/// Deserialize a JSON string into an object
abstract Deserialize<'T> : string -> 'T
/// Document serializer defaults
module DocumentSerializer =
-
+
open System.Text.Json
open System.Text.Json.Serialization
-
+
/// The default JSON serializer options to use with the stock serializer
let private jsonDefaultOpts =
let o = JsonSerializerOptions()
o.Converters.Add(JsonFSharpConverter())
o
-
+
/// The default JSON serializer
[]
let ``default`` =
@@ -424,7 +425,7 @@ module Configuration =
/// The serializer to use for document manipulation
let mutable private serializerValue = DocumentSerializer.``default``
-
+
/// Register a serializer to use for translating documents to domain types
/// The serializer to use when manipulating documents
[]
@@ -436,46 +437,46 @@ module Configuration =
[]
let serializer () =
serializerValue
-
+
/// The serialized name of the ID field for documents
let mutable private idFieldValue = "Id"
-
+
/// Specify the name of the ID field for documents
/// The name of the ID field for documents
[]
let useIdField it =
idFieldValue <- it
-
+
/// Retrieve the currently configured ID field for documents
/// The currently configured ID field
[]
let idField () =
idFieldValue
-
+
/// The automatic ID strategy used by the library
let mutable private autoIdValue = Disabled
-
+
/// Specify the automatic ID generation strategy used by the library
/// The automatic ID generation strategy to use
[]
let useAutoIdStrategy it =
autoIdValue <- it
-
+
/// Retrieve the currently configured automatic ID generation strategy
/// The current automatic ID generation strategy
[]
let autoIdStrategy () =
autoIdValue
-
+
/// The length of automatically generated random strings
let mutable private idStringLengthValue = 16
-
+
/// Specify the length of automatically generated random strings
/// The length of automatically generated random strings
[]
let useIdStringLength length =
idStringLengthValue <- length
-
+
/// Retrieve the currently configured length of automatically generated random strings
/// The current length of automatically generated random strings
[]
@@ -486,31 +487,31 @@ module Configuration =
/// Query construction functions
[]
module Query =
-
- /// Combine a query (SELECT, UPDATE, etc.) and a WHERE clause
+
+ /// Combine a query (SELECT, UPDATE, etc.) and a WHERE clause
/// The first part of the statement
- /// The WHERE clause for the statement
- /// The two parts of the query combined with WHERE
+ /// The WHERE clause for the statement
+ /// The two parts of the query combined with WHERE
[]
let statementWhere statement where =
$"%s{statement} WHERE %s{where}"
-
+
/// Queries to define tables and indexes
module Definition =
-
+
/// SQL statement to create a document table
/// The name of the table to create (may include schema)
- /// The type of data for the column (JSON, JSONB, etc.)
+ /// The type of data for the column (JSON, JSONB, etc.)
/// A query to create a document table
[]
let ensureTableFor name dataType =
$"CREATE TABLE IF NOT EXISTS %s{name} (data %s{dataType} NOT NULL)"
-
+
/// Split a schema and table name
let private splitSchemaAndTable (tableName: string) =
let parts = tableName.Split '.'
if Array.length parts = 1 then "", tableName else parts[0], parts[1]
-
+
/// SQL statement to create an index on one or more fields in a JSON document
/// The table on which an index should be created (may include schema)
/// The name of the index to be created
@@ -537,7 +538,7 @@ module Query =
[]
let ensureKey tableName dialect =
(ensureIndexOn tableName "key" [ Configuration.idField () ] dialect).Replace("INDEX", "UNIQUE INDEX")
-
+
/// Query to insert a document
/// The table into which to insert (may include schema)
/// A query to insert a document
@@ -554,48 +555,48 @@ module Query =
let save tableName =
sprintf
"INSERT INTO %s VALUES (@data) ON CONFLICT ((data->>'%s')) DO UPDATE SET data = EXCLUDED.data"
- tableName (Configuration.idField ())
-
+ tableName (Configuration.idField ())
+
/// Query to count documents in a table
/// The table in which to count documents (may include schema)
/// A query to count documents
- /// This query has no WHERE clause
+ /// This query has no WHERE clause
[]
let count tableName =
$"SELECT COUNT(*) AS it FROM %s{tableName}"
-
+
/// Query to check for document existence in a table
/// The table in which existence should be checked (may include schema)
- /// The WHERE clause with the existence criteria
+ /// The WHERE clause with the existence criteria
/// A query to check document existence
[]
let exists tableName where =
$"SELECT EXISTS (SELECT 1 FROM %s{tableName} WHERE %s{where}) AS it"
-
+
/// Query to select documents from a table
/// The table from which documents should be found (may include schema)
/// A query to retrieve documents
- /// This query has no WHERE clause
+ /// This query has no WHERE clause
[]
let find tableName =
$"SELECT data FROM %s{tableName}"
-
+
/// Query to update (replace) a document
/// The table in which documents should be replaced (may include schema)
/// A query to update documents
- /// This query has no WHERE clause
+ /// This query has no WHERE clause
[]
let update tableName =
$"UPDATE %s{tableName} SET data = @data"
-
+
/// Query to delete documents from a table
/// The table in which documents should be deleted (may include schema)
/// A query to delete documents
- /// This query has no WHERE clause
+ /// This query has no WHERE clause
[]
let delete tableName =
$"DELETE FROM %s{tableName}"
-
+
/// Create a SELECT clause to retrieve the document data from the given table
/// The table from which documents should be found (may include schema)
/// A query to retrieve documents
@@ -603,11 +604,11 @@ module Query =
[]
let selectFromTable tableName =
find tableName
-
- /// Create an ORDER BY clause for the given fields
+
+ /// Create an ORDER BY clause for the given fields
/// One or more fields by which to order
/// The SQL dialect for the generated clause
- /// An ORDER BY clause for the given fields
+ /// An ORDER BY clause for the given fields
[]
let orderBy fields dialect =
if Seq.isEmpty fields then ""
@@ -631,3 +632,49 @@ module Query =
|> function path -> path + defaultArg direction "")
|> String.concat ", "
|> function it -> $" ORDER BY {it}"
+
+
+#nowarn "FS3511" // "let rec" is not statically compilable
+
+open System.IO.Pipelines
+
+/// Functions that manipulate PipeWriters
+[]
+module PipeWriter =
+
+ /// Write a UTF-8 string to this pipe
+ /// The PipeWriter to which the string should be written
+ /// The string to be written to the pipe
+ /// true if the pipe is still open, false if not
+ []
+ let writeString (writer: PipeWriter) (text: string) = backgroundTask {
+ try
+ let! writeResult = writer.WriteAsync(Encoding.UTF8.GetBytes text)
+ return not writeResult.IsCompleted
+ with :? System.ObjectDisposedException -> return false
+ }
+
+ /// Write an array of strings, abandoning the sequence if the pipe is closed
+ /// The PipeWriter to which the strings should be written
+ /// The strings to be written
+ /// true if the pipe is still open, false if not
+ []
+ let writeStrings writer items = backgroundTask {
+ let theItems = Seq.cache items
+ let rec writeNext idx = backgroundTask {
+ match theItems |> Seq.tryItem idx with
+ | Some item ->
+ if idx > 0 then
+ let! _ = writeString writer ","
+ ()
+ match! writeString writer item with
+ | true -> return! writeNext (idx + 1)
+ | false -> return false
+ | None -> return true
+ }
+ let! _ = writeString writer "["
+ let! isCleanFinish = writeNext 0
+ if isCleanFinish then
+ let! _ = writeString writer "]"
+ ()
+ }
diff --git a/src/Common/README.md b/src/Common/README.md
index c0107b8..ba3aa52 100644
--- a/src/Common/README.md
+++ b/src/Common/README.md
@@ -8,11 +8,11 @@ This package provides common definitions and functionality for `BitBadger.Docume
- Select, insert, update, save (upsert), delete, count, and check existence of documents, and create tables and indexes for these documents
- Automatically generate IDs for documents (numeric IDs, GUIDs, or random strings)
-- Addresses documents via ID and via comparison on any field (for PostgreSQL, also via equality on any property by using JSON containment, or via condition on any property using JSON Path queries)
-- Accesses documents as your domain models (POCOs)
-- Uses `Task`-based async for all data access functions
-- Uses building blocks for more complex queries
+- Address documents via ID and via comparison on any field (for PostgreSQL, also via equality on any property by using JSON containment, or via condition on any property using JSON Path queries)
+- Access documents as your domain models (POCOs), as JSON strings, or as JSON written directly to a `PipeWriter`
+- Use `Task`-based async for all data access functions
+- Use building blocks for more complex queries
## Getting Started
-Install the library of your choice and follow its README; also, the [project site](https://bitbadger.solutions/open-source/relational-documents/) has complete documentation.
+Install the library of your choice and follow its README; also, the [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has complete documentation.
diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index 932d9dd..68d9ff6 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -3,16 +3,15 @@
net8.0;net9.0
embedded
true
- 4.0.1.0
- 4.0.1.0
- 4.0.1
- From v4.0: Add XML documention (IDE support)
-From v3.1: See 4.0 release for breaking changes and compatibility
+ 4.1.0.0
+ 4.1.0.0
+ 4.1.0
+ Add JSON retrieval and pipe-writing functions; update project URL to site with public API docs
danieljsummers
Bit Badger Solutions
README.md
icon.png
- https://bitbadger.solutions/open-source/relational-documents/
+ https://relationaldocs.bitbadger.solutions/dotnet/
false
https://git.bitbadger.solutions/bit-badger/BitBadger.Documents
Git
diff --git a/src/Postgres/Extensions.fs b/src/Postgres/Extensions.fs
index 568b51b..ebd9229 100644
--- a/src/Postgres/Extensions.fs
+++ b/src/Postgres/Extensions.fs
@@ -18,14 +18,38 @@ module Extensions =
member conn.customList<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) =
Custom.list<'TDoc> query parameters mapFunc (Sql.existingConnection conn)
+ /// Execute a query that returns a JSON array of results
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// A JSON array of results for the given query
+ member conn.customJsonArray query parameters mapFunc =
+ Custom.jsonArray query parameters mapFunc (Sql.existingConnection conn)
+
+ /// Execute a query, writing its results to the given PipeWriter
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The PipeWriter to which the results should be written
+ /// The mapping function to extract the document
+ member conn.writeCustomJsonArray query parameters writer mapFunc =
+ Custom.writeJsonArray query parameters writer mapFunc (Sql.existingConnection conn)
+
/// Execute a query that returns one or no results
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// Some with the first matching result, or None if not found
+ /// Some with the first matching result, or None if not found
member conn.customSingle<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) =
Custom.single<'TDoc> query parameters mapFunc (Sql.existingConnection conn)
+ /// Execute a query that returns one or no JSON documents
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The JSON document with the first matching result, or an empty document if not found
+ member conn.customJsonSingle query parameters mapFunc =
+ Custom.jsonSingle query parameters mapFunc (Sql.existingConnection conn)
+
/// Execute a query that returns no results
/// The query to retrieve the results
/// Parameters to use for the query
@@ -78,7 +102,7 @@ module Extensions =
member conn.countAll tableName =
Count.all tableName (Sql.existingConnection conn)
- /// Count matching documents using JSON field comparisons (->> =, etc.)
+ /// Count matching documents using JSON field comparisons (->> =, etc.)
/// The table in which documents should be counted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -86,14 +110,14 @@ module Extensions =
member conn.countByFields tableName howMatched fields =
Count.byFields tableName howMatched fields (Sql.existingConnection conn)
- /// Count matching documents using a JSON containment query (@>)
+ /// Count matching documents using a JSON containment query (@>)
/// The table in which documents should be counted (may include schema)
/// The document to match with the containment query
/// The count of the documents in the table
member conn.countByContains tableName criteria =
Count.byContains tableName criteria (Sql.existingConnection conn)
- /// Count matching documents using a JSON Path match query (@?)
+ /// Count matching documents using a JSON Path match query (@?)
/// The table in which documents should be counted (may include schema)
/// The JSON Path expression to be matched
/// The count of the documents in the table
@@ -107,7 +131,7 @@ module Extensions =
member conn.existsById tableName docId =
Exists.byId tableName docId (Sql.existingConnection conn)
- /// Determine if a document exists using JSON field comparisons (->> =, etc.)
+ /// Determine if a document exists using JSON field comparisons (->> =, etc.)
/// The table in which existence should be checked (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -115,14 +139,14 @@ module Extensions =
member conn.existsByFields tableName howMatched fields =
Exists.byFields tableName howMatched fields (Sql.existingConnection conn)
- /// Determine if a document exists using a JSON containment query (@>)
+ /// Determine if a document exists using a JSON containment query (@>)
/// The table in which existence should be checked (may include schema)
/// The document to match with the containment query
/// True if any matching documents exist, false if not
member conn.existsByContains tableName criteria =
Exists.byContains tableName criteria (Sql.existingConnection conn)
- /// Determine if a document exists using a JSON Path match query (@?)
+ /// Determine if a document exists using a JSON Path match query (@?)
/// The table in which existence should be checked (may include schema)
/// The JSON Path expression to be matched
/// True if any matching documents exist, false if not
@@ -145,11 +169,11 @@ module Extensions =
/// Retrieve a document by its ID
/// The table from which a document should be retrieved (may include schema)
/// The ID of the document to retrieve
- /// Some with the document if found, None otherwise
+ /// Some with the document if found, None otherwise
member conn.findById<'TKey, 'TDoc> tableName docId =
Find.byId<'TKey, 'TDoc> tableName docId (Sql.existingConnection conn)
- /// Retrieve documents matching JSON field comparisons (->> =, etc.)
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.)
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -158,8 +182,8 @@ module Extensions =
Find.byFields<'TDoc> tableName howMatched fields (Sql.existingConnection conn)
///
- /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields
- /// in the document
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
+ /// the document
///
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
@@ -170,7 +194,7 @@ module Extensions =
Find.byFieldsOrdered<'TDoc>
tableName howMatched queryFields orderFields (Sql.existingConnection conn)
- /// Retrieve documents matching a JSON containment query (@>)
+ /// Retrieve documents matching a JSON containment query (@>)
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// All documents matching the given containment query
@@ -178,7 +202,7 @@ module Extensions =
Find.byContains<'TDoc> tableName criteria (Sql.existingConnection conn)
///
- /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
+ /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
/// document
///
/// The table from which documents should be retrieved (may include schema)
@@ -188,7 +212,7 @@ module Extensions =
member conn.findByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields =
Find.byContainsOrdered<'TDoc> tableName criteria orderFields (Sql.existingConnection conn)
- /// Retrieve documents matching a JSON Path match query (@?)
+ /// Retrieve documents matching a JSON Path match query (@?)
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// All documents matching the given JSON Path expression
@@ -196,8 +220,7 @@ module Extensions =
Find.byJsonPath<'TDoc> tableName jsonPath (Sql.existingConnection conn)
///
- /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the
- /// document
+ /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
@@ -206,69 +229,356 @@ module Extensions =
member conn.findByJsonPathOrdered<'TDoc> tableName jsonPath orderFields =
Find.byJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn)
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// Some with the first document, or None if not found
+ /// Some with the first document, or None if not found
member conn.findFirstByFields<'TDoc> tableName howMatched fields =
Find.firstByFields<'TDoc> tableName howMatched fields (Sql.existingConnection conn)
///
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the
- /// given fields in the document
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// fields in the document
///
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
///
- /// Some with the first document ordered by the given fields, or None if not found
+ /// Some with the first document ordered by the given fields, or None if not found
///
member conn.findFirstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields =
Find.firstByFieldsOrdered<'TDoc>
tableName howMatched queryFields orderFields (Sql.existingConnection conn)
- /// Retrieve the first document matching a JSON containment query (@>)
+ /// Retrieve the first document matching a JSON containment query (@>)
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
- /// Some with the first document, or None if not found
+ /// Some with the first document, or None if not found
member conn.findFirstByContains<'TDoc> tableName (criteria: obj) =
Find.firstByContains<'TDoc> tableName criteria (Sql.existingConnection conn)
///
- /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields
- /// in the document
+ /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in
+ /// the document
///
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
///
- /// Some with the first document ordered by the given fields, or None if not found
+ /// Some with the first document ordered by the given fields, or None if not found
///
member conn.findFirstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields =
Find.firstByContainsOrdered<'TDoc> tableName criteria orderFields (Sql.existingConnection conn)
- /// Retrieve the first document matching a JSON Path match query (@?)
+ /// Retrieve the first document matching a JSON Path match query (@?)
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
- /// Some with the first document, or None if not found
+ /// Some with the first document, or None if not found
member conn.findFirstByJsonPath<'TDoc> tableName jsonPath =
Find.firstByJsonPath<'TDoc> tableName jsonPath (Sql.existingConnection conn)
///
- /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in
- /// the document
+ /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// document
///
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
///
- /// Some with the first document ordered by the given fields, or None if not found
+ /// Some with the first document ordered by the given fields, or None if not found
///
member conn.findFirstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields =
Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn)
+ /// Retrieve all documents in the given table as a JSON array
+ /// The table from which documents should be retrieved (may include schema)
+ /// All documents from the given table as a JSON array
+ member conn.jsonAll tableName =
+ Json.all tableName (Sql.existingConnection conn)
+
+ /// Write all documents in the given table to the given PipeWriter
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ member conn.writeJsonAll tableName writer =
+ Json.writeAll tableName writer (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// Fields by which the results should be ordered
+ /// All documents from the given table as a JSON array, ordered by the given fields
+ member conn.jsonAllOrdered tableName orderFields =
+ Json.allOrdered tableName orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Fields by which the results should be ordered
+ member conn.writeJsonAllOrdered tableName writer orderFields =
+ Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn)
+
+ /// Retrieve a JSON document by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The ID of the document to retrieve
+ /// The JSON document if found, an empty JSON document otherwise
+ member conn.jsonById<'TKey> tableName (docId: 'TKey) =
+ Json.byId tableName docId (Sql.existingConnection conn)
+
+ /// Write a JSON document to the given PipeWriter by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The ID of the document to retrieve
+ member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) =
+ Json.writeById tableName writer docId (Sql.existingConnection conn)
+
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.)
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// All JSON documents matching the given fields
+ member conn.jsonByFields tableName howMatched fields =
+ Json.byFields tableName howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =,
+ /// etc.)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ member conn.writeJsonByFields tableName writer howMatched fields =
+ Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given fields, ordered by the other given fields
+ member conn.jsonByFieldsOrdered tableName howMatched queryFields orderFields =
+ Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =,
+ /// etc.) ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ member conn.writeJsonByFieldsOrdered tableName writer howMatched queryFields orderFields =
+ Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ /// Retrieve JSON documents matching a JSON containment query (@>)
+ /// The table from which documents should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// All JSON documents matching the given containment query
+ member conn.jsonByContains tableName (criteria: obj) =
+ Json.byContains tableName criteria (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ member conn.writeJsonByContains tableName writer (criteria: obj) =
+ Json.writeByContains tableName writer criteria (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given containment query, ordered by the given fields
+ member conn.jsonByContainsOrdered tableName (criteria: obj) orderFields =
+ Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered
+ /// by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ member conn.writeJsonByContainsOrdered tableName writer (criteria: obj) orderFields =
+ Json.writeByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn)
+
+ /// Retrieve JSON documents matching a JSON Path match query (@?)
+ /// The table from which documents should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// All JSON documents matching the given JSON Path expression
+ member conn.jsonByJsonPath tableName jsonPath =
+ Json.byJsonPath tableName jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ member conn.writeJsonByJsonPath tableName writer jsonPath =
+ Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given JSON Path expression, ordered by the given fields
+ member conn.jsonByJsonPathOrdered tableName jsonPath orderFields =
+ Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by
+ /// the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ member conn.writeJsonByJsonPathOrdered tableName writer jsonPath orderFields =
+ Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ member conn.jsonFirstByFields tableName howMatched fields =
+ Json.firstByFields tableName howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
+ /// (->> =, etc.)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ member conn.writeJsonFirstByFields tableName writer howMatched fields =
+ Json.writeFirstByFields tableName writer howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the
+ /// given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ member conn.jsonFirstByFieldsOrdered tableName howMatched queryFields orderFields =
+ Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
+ /// (->> =, etc.) ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ member conn.writeJsonFirstByFieldsOrdered tableName writer howMatched queryFields orderFields =
+ Json.writeFirstByFieldsOrdered
+ tableName writer howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ /// Retrieve the first JSON document matching a JSON containment query (@>)
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ member conn.jsonFirstByContains tableName (criteria: obj) =
+ Json.firstByContains tableName criteria (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query
+ /// (@>)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ member conn.writeJsonFirstByContains tableName writer (criteria: obj) =
+ Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given
+ /// fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ member conn.jsonFirstByContainsOrdered tableName (criteria: obj) orderFields =
+ Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query
+ /// (@>) ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ member conn.writeJsonFirstByContainsOrdered tableName writer (criteria: obj) orderFields =
+ Json.writeFirstByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn)
+
+ /// Retrieve the first JSON document matching a JSON Path match query (@?)
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ member conn.jsonFirstByJsonPath tableName jsonPath =
+ Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ member conn.writeJsonFirstByJsonPath tableName writer jsonPath =
+ Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields
+ /// in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ member conn.jsonFirstByJsonPathOrdered tableName jsonPath orderFields =
+ Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ /// ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ member conn.writeJsonFirstByJsonPathOrdered tableName writer jsonPath orderFields =
+ Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn)
+
/// Update (replace) an entire document by its ID
/// The table in which a document should be updated (may include schema)
/// The ID of the document to be updated (replaced)
@@ -294,7 +604,7 @@ module Extensions =
Patch.byId tableName docId patch (Sql.existingConnection conn)
///
- /// Patch documents using a JSON field comparison query in the WHERE clause (->> =,
+ /// Patch documents using a JSON field comparison query in the WHERE clause (->> =,
/// etc.)
///
/// The table in which documents should be patched (may include schema)
@@ -305,7 +615,7 @@ module Extensions =
Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn)
///
- /// Patch documents using a JSON containment query in the WHERE clause (@>)
+ /// Patch documents using a JSON containment query in the WHERE clause (@>)
///
/// The table in which documents should be patched (may include schema)
/// The document to match the containment query
@@ -313,7 +623,7 @@ module Extensions =
member conn.patchByContains tableName (criteria: 'TCriteria) (patch: 'TPatch) =
Patch.byContains tableName criteria patch (Sql.existingConnection conn)
- /// Patch documents using a JSON Path match query in the WHERE clause (@?)
+ /// Patch documents using a JSON Path match query in the WHERE clause (@?)
/// The table in which documents should be patched (may include schema)
/// The JSON Path expression to match
/// The partial document to patch the existing document
@@ -335,14 +645,14 @@ module Extensions =
member conn.removeFieldsByFields tableName howMatched fields fieldNames =
RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn)
- /// Remove fields from documents via a JSON containment query (@>)
+ /// Remove fields from documents via a JSON containment query (@>)
/// The table in which documents should be modified (may include schema)
/// The document to match the containment query
/// One or more field names to remove from the matching documents
member conn.removeFieldsByContains tableName (criteria: 'TContains) fieldNames =
RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn)
- /// Remove fields from documents via a JSON Path match query (@?)
+ /// Remove fields from documents via a JSON Path match query (@?)
/// The table in which documents should be modified (may include schema)
/// The JSON Path expression to match
/// One or more field names to remove from the matching documents
@@ -355,14 +665,14 @@ module Extensions =
member conn.deleteById tableName (docId: 'TKey) =
Delete.byId tableName docId (Sql.existingConnection conn)
- /// Delete documents by matching a JSON field comparison query (->> =, etc.)
+ /// Delete documents by matching a JSON field comparison query (->> =, etc.)
/// The table in which documents should be deleted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
member conn.deleteByFields tableName howMatched fields =
Delete.byFields tableName howMatched fields (Sql.existingConnection conn)
- /// Delete documents by matching a JSON contains query (@>)
+ /// Delete documents by matching a JSON contains query (@>)
/// The table in which documents should be deleted (may include schema)
/// The document to match the containment query
member conn.deleteByContains tableName (criteria: 'TContains) =
@@ -381,7 +691,7 @@ open System.Runtime.CompilerServices
type NpgsqlConnectionCSharpExtensions =
/// Execute a query that returns a list of results
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
@@ -390,19 +700,49 @@ type NpgsqlConnectionCSharpExtensions =
static member inline CustomList<'TDoc>(conn, query, parameters, mapFunc: System.Func) =
Custom.List<'TDoc>(query, parameters, mapFunc, Sql.existingConnection conn)
+ /// Execute a query that returns a JSON array of results
+ /// The NpgsqlConnection on which to run the query
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// A JSON array of results for the given query
+ []
+ static member inline CustomJsonArray(conn, query, parameters, mapFunc) =
+ Custom.JsonArray(query, parameters, mapFunc, Sql.existingConnection conn)
+
+ /// Execute a query, writing its results to the given PipeWriter
+ /// The NpgsqlConnection on which to run the query
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The PipeWriter to which the results should be written
+ /// The mapping function to extract the document
+ []
+ static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) =
+ Custom.WriteJsonArray(query, parameters, writer, mapFunc, Sql.existingConnection conn)
+
/// Execute a query that returns one or no results
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// The first matching result, or null if not found
+ /// The first matching result, or null if not found
[]
static member inline CustomSingle<'TDoc when 'TDoc: null and 'TDoc: not struct>(
conn, query, parameters, mapFunc: System.Func) =
Custom.Single<'TDoc>(query, parameters, mapFunc, Sql.existingConnection conn)
+ /// Execute a query that returns one or no JSON documents
+ /// The NpgsqlConnection on which to run the query
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The JSON document with the first matching result, or an empty document if not found
+ []
+ static member inline CustomJsonSingle(conn, query, parameters, mapFunc) =
+ Custom.JsonSingle(query, parameters, mapFunc, Sql.existingConnection conn)
+
/// Execute a query that returns no results
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The query to retrieve the results
/// Parameters to use for the query
[]
@@ -410,7 +750,7 @@ type NpgsqlConnectionCSharpExtensions =
Custom.nonQuery query parameters (Sql.existingConnection conn)
/// Execute a query that returns a scalar value
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The query to retrieve the value
/// Parameters to use for the query
/// The mapping function to obtain the value
@@ -421,14 +761,14 @@ type NpgsqlConnectionCSharpExtensions =
Custom.Scalar(query, parameters, mapFunc, Sql.existingConnection conn)
/// Create a document table
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table whose existence should be ensured (may include schema)
[]
static member inline EnsureTable(conn, name) =
Definition.ensureTable name (Sql.existingConnection conn)
/// Create an index on documents in the specified table
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table to be indexed (may include schema)
/// The type of document index to create
[]
@@ -436,7 +776,7 @@ type NpgsqlConnectionCSharpExtensions =
Definition.ensureDocumentIndex name idxType (Sql.existingConnection conn)
/// Create an index on field(s) within documents in the specified table
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table to be indexed (may include schema)
/// The name of the index to create
/// One or more fields to be indexed
@@ -445,7 +785,7 @@ type NpgsqlConnectionCSharpExtensions =
Definition.ensureFieldIndex tableName indexName fields (Sql.existingConnection conn)
/// Insert a new document
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table into which the document should be inserted (may include schema)
/// The document to be inserted
[]
@@ -453,7 +793,7 @@ type NpgsqlConnectionCSharpExtensions =
insert<'TDoc> tableName document (Sql.existingConnection conn)
/// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert")
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table into which the document should be saved (may include schema)
/// The document to be saved
[]
@@ -461,15 +801,15 @@ type NpgsqlConnectionCSharpExtensions =
save<'TDoc> tableName document (Sql.existingConnection conn)
/// Count all documents in a table
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be counted (may include schema)
/// The count of the documents in the table
[]
static member inline CountAll(conn, tableName) =
Count.all tableName (Sql.existingConnection conn)
- /// Count matching documents using JSON field comparisons (->> =, etc.)
- /// The NpgsqlConnection on which to run the query
+ /// Count matching documents using JSON field comparisons (->> =, etc.)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be counted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -478,8 +818,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline CountByFields(conn, tableName, howMatched, fields) =
Count.byFields tableName howMatched fields (Sql.existingConnection conn)
- /// Count matching documents using a JSON containment query (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Count matching documents using a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be counted (may include schema)
/// The document to match with the containment query
/// The count of the documents in the table
@@ -487,8 +827,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline CountByContains(conn, tableName, criteria: 'TCriteria) =
Count.byContains tableName criteria (Sql.existingConnection conn)
- /// Count matching documents using a JSON Path match query (@?)
- /// The NpgsqlConnection on which to run the query
+ /// Count matching documents using a JSON Path match query (@?)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be counted (may include schema)
/// The JSON Path expression to be matched
/// The count of the documents in the table
@@ -497,7 +837,7 @@ type NpgsqlConnectionCSharpExtensions =
Count.byJsonPath tableName jsonPath (Sql.existingConnection conn)
/// Determine if a document exists for the given ID
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which existence should be checked (may include schema)
/// The ID of the document whose existence should be checked
/// True if a document exists, false if not
@@ -505,8 +845,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline ExistsById(conn, tableName, docId) =
Exists.byId tableName docId (Sql.existingConnection conn)
- /// Determine if a document exists using JSON field comparisons (->> =, etc.)
- /// The NpgsqlConnection on which to run the query
+ /// Determine if a document exists using JSON field comparisons (->> =, etc.)
+ /// The NpgsqlConnection on which to run the query
/// The table in which existence should be checked (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -515,8 +855,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline ExistsByFields(conn, tableName, howMatched, fields) =
Exists.byFields tableName howMatched fields (Sql.existingConnection conn)
- /// Determine if a document exists using a JSON containment query (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Determine if a document exists using a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table in which existence should be checked (may include schema)
/// The document to match with the containment query
/// True if any matching documents exist, false if not
@@ -524,8 +864,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline ExistsByContains(conn, tableName, criteria: 'TCriteria) =
Exists.byContains tableName criteria (Sql.existingConnection conn)
- /// Determine if a document exists using a JSON Path match query (@?)
- /// The NpgsqlConnection on which to run the query
+ /// Determine if a document exists using a JSON Path match query (@?)
+ /// The NpgsqlConnection on which to run the query
/// The table in which existence should be checked (may include schema)
/// The JSON Path expression to be matched
/// True if any matching documents exist, false if not
@@ -534,7 +874,7 @@ type NpgsqlConnectionCSharpExtensions =
Exists.byJsonPath tableName jsonPath (Sql.existingConnection conn)
/// Retrieve all documents in the given table
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// All documents from the given table
[]
@@ -542,7 +882,7 @@ type NpgsqlConnectionCSharpExtensions =
Find.All<'TDoc>(tableName, Sql.existingConnection conn)
/// Retrieve all documents in the given table ordered by the given fields in the document
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// Fields by which the results should be ordered
/// All documents from the given table, ordered by the given fields
@@ -551,16 +891,16 @@ type NpgsqlConnectionCSharpExtensions =
Find.AllOrdered<'TDoc>(tableName, orderFields, Sql.existingConnection conn)
/// Retrieve a document by its ID
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// The ID of the document to retrieve
- /// The document if found, null otherwise
+ /// The document if found, null otherwise
[]
static member inline FindById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, docId: 'TKey) =
Find.ById<'TKey, 'TDoc>(tableName, docId, Sql.existingConnection conn)
- /// Retrieve documents matching JSON field comparisons (->> =, etc.)
- /// The NpgsqlConnection on which to run the query
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.)
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -570,10 +910,10 @@ type NpgsqlConnectionCSharpExtensions =
Find.ByFields<'TDoc>(tableName, howMatched, fields, Sql.existingConnection conn)
///
- /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
- /// the document
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the
+ /// document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -584,8 +924,8 @@ type NpgsqlConnectionCSharpExtensions =
Find.ByFieldsOrdered<'TDoc>(
tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn)
- /// Retrieve documents matching a JSON containment query (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Retrieve documents matching a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// All documents matching the given containment query
@@ -594,10 +934,9 @@ type NpgsqlConnectionCSharpExtensions =
Find.ByContains<'TDoc>(tableName, criteria, Sql.existingConnection conn)
///
- /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
- /// document
+ /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
@@ -606,8 +945,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline FindByContainsOrdered<'TDoc>(conn, tableName, criteria: obj, orderFields) =
Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn)
- /// Retrieve documents matching a JSON Path match query (@?)
- /// The NpgsqlConnection on which to run the query
+ /// Retrieve documents matching a JSON Path match query (@?)
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// All documents matching the given JSON Path expression
@@ -616,9 +955,9 @@ type NpgsqlConnectionCSharpExtensions =
Find.ByJsonPath<'TDoc>(tableName, jsonPath, Sql.existingConnection conn)
///
- /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
@@ -627,82 +966,424 @@ type NpgsqlConnectionCSharpExtensions =
static member inline FindByJsonPathOrdered<'TDoc>(conn, tableName, jsonPath, orderFields) =
Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn)
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
- /// The NpgsqlConnection on which to run the query
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The first document, or null if not found
+ /// The first document, or null if not found
[]
static member inline FindFirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(
conn, tableName, howMatched, fields) =
Find.FirstByFields<'TDoc>(tableName, howMatched, fields, Sql.existingConnection conn)
///
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
/// fields in the document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- /// The first document ordered by the given fields, or null if not found
+ /// The first document ordered by the given fields, or null if not found
[]
static member inline FindFirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
conn, tableName, howMatched, queryFields, orderFields) =
Find.FirstByFieldsOrdered<'TDoc>(
tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn)
- /// Retrieve the first document matching a JSON containment query (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Retrieve the first document matching a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
- /// The first document, or null if not found
+ /// The first document, or null if not found
[]
static member inline FindFirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(
conn, tableName, criteria: obj) =
Find.FirstByContains<'TDoc>(tableName, criteria, Sql.existingConnection conn)
///
- /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in
- /// the document
+ /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- /// The first document ordered by the given fields, or null if not found
+ /// The first document ordered by the given fields, or null if not found
[]
static member inline FindFirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
conn, tableName, criteria: obj, orderFields) =
Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn)
- /// Retrieve the first document matching a JSON Path match query (@?)
- /// The NpgsqlConnection on which to run the query
+ /// Retrieve the first document matching a JSON Path match query (@?)
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
- /// The first document, or null if not found
+ /// The first document, or null if not found
[]
static member inline FindFirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, jsonPath) =
Find.FirstByJsonPath<'TDoc>(tableName, jsonPath, Sql.existingConnection conn)
///
- /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
/// document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- /// The first document ordered by the given fields, or null if not found
+ /// The first document ordered by the given fields, or null if not found
[]
static member inline FindFirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
conn, tableName, jsonPath, orderFields) =
Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn)
+ /// <summary>Retrieve all documents in the given table as a JSON array</summary>
+ /// <param name="conn">The <c>NpgsqlConnection</c> on which to run the query</param>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <returns>All documents from the given table as a JSON array</returns>
+ []
+ static member inline JsonAll(conn, tableName) =
+ Json.all tableName (Sql.existingConnection conn)
+
+ /// <summary>Write all documents in the given table to the given <c>PipeWriter</c></summary>
+ /// <param name="conn">The <c>NpgsqlConnection</c> on which to run the query</param>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ []
+ static member inline WriteJsonAll(conn, tableName, writer) =
+ Json.writeAll tableName writer (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// Fields by which the results should be ordered
+ /// All documents from the given table as a JSON array, ordered by the given fields
+ []
+ static member inline JsonAllOrdered(conn, tableName, orderFields) =
+ Json.allOrdered tableName orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the
+ /// document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) =
+ Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn)
+
+ /// <summary>Retrieve a JSON document by its ID</summary>
+ /// <param name="conn">The <c>NpgsqlConnection</c> on which to run the query</param>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="docId">The ID of the document to retrieve</param>
+ /// <returns>The JSON document if found, an empty JSON document otherwise</returns>
+ []
+ static member inline JsonById<'TKey>(conn, tableName, docId: 'TKey) =
+ Json.byId tableName docId (Sql.existingConnection conn)
+
+ /// Write a JSON document to the given PipeWriter by its ID
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The ID of the document to retrieve
+ []
+ static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId) =
+ Json.writeById tableName writer docId (Sql.existingConnection conn)
+
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.)
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// All JSON documents matching the given fields
+ []
+ static member inline JsonByFields(conn, tableName, howMatched, fields) =
+ Json.byFields tableName howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ []
+ static member inline WriteJsonByFields(conn, tableName, writer, howMatched, fields) =
+ Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields
+ /// in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given fields, ordered by the other given fields
+ []
+ static member inline JsonByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) =
+ Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
+ /// ordered by the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) =
+ Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ /// Retrieve JSON documents matching a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// All JSON documents matching the given containment query
+ []
+ static member inline JsonByContains(conn, tableName, criteria: obj) =
+ Json.byContains tableName criteria (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>)
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ []
+ static member inline WriteJsonByContains(conn, tableName, writer, criteria: obj) =
+ Json.writeByContains tableName writer criteria (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// All documents matching the given containment query, ordered by the given fields
+ []
+ static member inline JsonByContainsOrdered(conn, tableName, criteria: obj, orderFields) =
+ Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by
+ /// the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonByContainsOrdered(conn, tableName, writer, criteria: obj, orderFields) =
+ Json.writeByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn)
+
+ /// <summary>Retrieve JSON documents matching a JSON Path match query (<c>@?</c>)</summary>
+ /// <param name="conn">The <c>NpgsqlConnection</c> on which to run the query</param>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="jsonPath">The JSON Path expression to match</param>
+ /// <returns>All JSON documents matching the given JSON Path expression</returns>
+ []
+ static member inline JsonByJsonPath(conn, tableName, jsonPath) =
+ Json.byJsonPath tableName jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ []
+ static member inline WriteJsonByJsonPath(conn, tableName, writer, jsonPath) =
+ Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given JSON Path expression, ordered by the given fields
+ []
+ static member inline JsonByJsonPathOrdered(conn, tableName, jsonPath, orderFields) =
+ Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by the
+ /// given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) =
+ Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn)
+
+ /// <summary>Retrieve the first JSON document matching JSON field comparisons (<c>->> =</c>, etc.)</summary>
+ /// <param name="conn">The <c>NpgsqlConnection</c> on which to run the query</param>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="fields">The field conditions to match</param>
+ /// <returns>The first matching JSON document if found, an empty JSON document otherwise</returns>
+ []
+ static member inline JsonFirstByFields(conn, tableName, howMatched, fields) =
+ Json.firstByFields tableName howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
+ /// (->> =, etc.)
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ []
+ static member inline WriteJsonFirstByFields(conn, tableName, writer, howMatched, fields) =
+ Json.writeFirstByFields tableName writer howMatched fields (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the
+ /// given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ static member inline JsonFirstByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) =
+ Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
+ /// (->> =, etc.) ordered by the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonFirstByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) =
+ Json.writeFirstByFieldsOrdered
+ tableName writer howMatched queryFields orderFields (Sql.existingConnection conn)
+
+ /// Retrieve the first JSON document matching a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ static member inline JsonFirstByContains(conn, tableName, criteria: obj) =
+ Json.firstByContains tableName criteria (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ []
+ static member inline WriteJsonFirstByContains(conn, tableName, writer, criteria: obj) =
+ Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in
+ /// the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ static member inline JsonFirstByContainsOrdered(conn, tableName, criteria: obj, orderFields) =
+ Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
+ /// ordered by the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonFirstByContainsOrdered(conn, tableName, writer, criteria: obj, orderFields) =
+ Json.writeFirstByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn)
+
+ /// Retrieve the first JSON document matching a JSON Path match query (@?)
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ static member inline JsonFirstByJsonPath(conn, tableName, jsonPath) =
+ Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ []
+ static member inline WriteJsonFirstByJsonPath(conn, tableName, writer, jsonPath) =
+ Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn)
+
+ ///
+ /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ static member inline JsonFirstByJsonPathOrdered(conn, tableName, jsonPath, orderFields) =
+ Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn)
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ /// ordered by the given fields in the document
+ ///
+ /// The NpgsqlConnection on which to run the query
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ []
+ static member inline WriteJsonFirstByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) =
+ Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn)
+
/// Update (replace) an entire document by its ID
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which a document should be updated (may include schema)
/// The ID of the document to be updated (replaced)
/// The new document
@@ -713,7 +1394,7 @@ type NpgsqlConnectionCSharpExtensions =
///
/// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which a document should be updated (may include schema)
/// The function to obtain the ID of the document
/// The new document
@@ -722,7 +1403,7 @@ type NpgsqlConnectionCSharpExtensions =
Update.ByFunc(tableName, idFunc, document, Sql.existingConnection conn)
/// Patch a document by its ID
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which a document should be patched (may include schema)
/// The ID of the document to patch
/// The partial document to patch the existing document
@@ -731,9 +1412,9 @@ type NpgsqlConnectionCSharpExtensions =
Patch.byId tableName docId patch (Sql.existingConnection conn)
///
- /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.)
+ /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.)
///
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be patched (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -742,8 +1423,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline PatchByFields(conn, tableName, howMatched, fields, patch: 'TPatch) =
Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn)
- /// Patch documents using a JSON containment query in the WHERE clause (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Patch documents using a JSON containment query in the WHERE clause (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be patched (may include schema)
/// The document to match the containment query
/// The partial document to patch the existing document
@@ -751,8 +1432,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline PatchByContains(conn, tableName, criteria: 'TCriteria, patch: 'TPatch) =
Patch.byContains tableName criteria patch (Sql.existingConnection conn)
- /// Patch documents using a JSON Path match query in the WHERE clause (@?)
- /// The NpgsqlConnection on which to run the query
+ /// Patch documents using a JSON Path match query in the WHERE clause (@?)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be patched (may include schema)
/// The JSON Path expression to match
/// The partial document to patch the existing document
@@ -761,7 +1442,7 @@ type NpgsqlConnectionCSharpExtensions =
Patch.byJsonPath tableName jsonPath patch (Sql.existingConnection conn)
/// Remove fields from a document by the document's ID
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which a document should be modified (may include schema)
/// The ID of the document to modify
/// One or more field names to remove from the document
@@ -770,7 +1451,7 @@ type NpgsqlConnectionCSharpExtensions =
RemoveFields.byId tableName docId fieldNames (Sql.existingConnection conn)
/// Remove fields from documents via a comparison on JSON fields in the document
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be modified (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -779,8 +1460,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline RemoveFieldsByFields(conn, tableName, howMatched, fields, fieldNames) =
RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn)
- /// Remove fields from documents via a JSON containment query (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Remove fields from documents via a JSON containment query (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be modified (may include schema)
/// The document to match the containment query
/// One or more field names to remove from the matching documents
@@ -788,8 +1469,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline RemoveFieldsByContains(conn, tableName, criteria: 'TContains, fieldNames) =
RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn)
- /// Remove fields from documents via a JSON Path match query (@?)
- /// The NpgsqlConnection on which to run the query
+ /// Remove fields from documents via a JSON Path match query (@?)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be modified (may include schema)
/// The JSON Path expression to match
/// One or more field names to remove from the matching documents
@@ -798,15 +1479,15 @@ type NpgsqlConnectionCSharpExtensions =
RemoveFields.byJsonPath tableName jsonPath fieldNames (Sql.existingConnection conn)
/// Delete a document by its ID
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which a document should be deleted (may include schema)
/// The ID of the document to delete
[]
static member inline DeleteById(conn, tableName, docId: 'TKey) =
Delete.byId tableName docId (Sql.existingConnection conn)
- /// Delete documents by matching a JSON field comparison query (->> =, etc.)
- /// The NpgsqlConnection on which to run the query
+ /// Delete documents by matching a JSON field comparison query (->> =, etc.)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be deleted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -814,8 +1495,8 @@ type NpgsqlConnectionCSharpExtensions =
static member inline DeleteByFields(conn, tableName, howMatched, fields) =
Delete.byFields tableName howMatched fields (Sql.existingConnection conn)
- /// Delete documents by matching a JSON contains query (@>)
- /// The NpgsqlConnection on which to run the query
+ /// Delete documents by matching a JSON contains query (@>)
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be deleted (may include schema)
/// The document to match the containment query
[]
@@ -823,7 +1504,7 @@ type NpgsqlConnectionCSharpExtensions =
Delete.byContains tableName criteria (Sql.existingConnection conn)
/// Delete documents by matching a JSON Path match query (@?)
- /// The NpgsqlConnection on which to run the query
+ /// The NpgsqlConnection on which to run the query
/// The table in which documents should be deleted (may include schema)
/// The JSON Path expression to match
[]
diff --git a/src/Postgres/Functions.fs b/src/Postgres/Functions.fs
index fb189e9..cf4d9f6 100644
--- a/src/Postgres/Functions.fs
+++ b/src/Postgres/Functions.fs
@@ -21,11 +21,45 @@ module Custom =
let List<'TDoc>(query, parameters, mapFunc: System.Func) =
WithProps.Custom.List<'TDoc>(query, parameters, mapFunc, fromDataSource ())
+ /// Execute a query that returns a JSON array of results
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// A JSON array of results for the given query
+ []
+ let jsonArray query parameters mapFunc =
+ WithProps.Custom.jsonArray query parameters mapFunc (fromDataSource ())
+
+ /// Execute a query that returns a JSON array of results
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// A JSON array of results for the given query
+ let JsonArray(query, parameters, mapFunc) =
+ WithProps.Custom.JsonArray(query, parameters, mapFunc, fromDataSource ())
+
+ /// Execute a query, writing its results to the given PipeWriter
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The PipeWriter to which the results should be written
+ /// The mapping function to extract the document
+ []
+ let writeJsonArray query parameters writer mapFunc =
+ WithProps.Custom.writeJsonArray query parameters writer mapFunc (fromDataSource ())
+
+ /// Execute a query, writing its results to the given PipeWriter
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The PipeWriter to which the results should be written
+ /// The mapping function to extract the document
+ let WriteJsonArray(query, parameters, writer, mapFunc) =
+ WithProps.Custom.WriteJsonArray(query, parameters, writer, mapFunc, fromDataSource ())
+
/// Execute a query that returns one or no results
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// Some with the first matching result, or None if not found
+ /// Some with the first matching result, or None if not found
[]
let single<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) =
WithProps.Custom.single<'TDoc> query parameters mapFunc (fromDataSource ())
@@ -34,11 +68,28 @@ module Custom =
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// The first matching result, or null if not found
+ /// The first matching result, or null if not found
let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>(
query, parameters, mapFunc: System.Func) =
WithProps.Custom.Single<'TDoc>(query, parameters, mapFunc, fromDataSource ())
+ /// Execute a query that returns one or no JSON documents
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The JSON document with the first matching result, or an empty document if not found
+ []
+ let jsonSingle query parameters mapFunc =
+ WithProps.Custom.jsonSingle query parameters mapFunc (fromDataSource ())
+
+ /// Execute a query that returns one or no JSON documents
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The JSON document with the first matching result, or an empty document if not found
+ let JsonSingle(query, parameters, mapFunc) =
+ WithProps.Custom.JsonSingle(query, parameters, mapFunc, fromDataSource ())
+
/// Execute a query that returns no results
/// The query to retrieve the results
/// Parameters to use for the query
@@ -120,7 +171,7 @@ module Count =
let all tableName =
WithProps.Count.all tableName (fromDataSource ())
- /// Count matching documents using JSON field comparisons (->> =, etc.)
+ /// Count matching documents using JSON field comparisons (->> =, etc.)
/// The table in which documents should be counted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -129,7 +180,7 @@ module Count =
let byFields tableName howMatched fields =
WithProps.Count.byFields tableName howMatched fields (fromDataSource ())
- /// Count matching documents using a JSON containment query (@>)
+ /// Count matching documents using a JSON containment query (@>)
/// The table in which documents should be counted (may include schema)
/// The document to match with the containment query
/// The count of the documents in the table
@@ -137,7 +188,7 @@ module Count =
let byContains tableName criteria =
WithProps.Count.byContains tableName criteria (fromDataSource ())
- /// Count matching documents using a JSON Path match query (@?)
+ /// Count matching documents using a JSON Path match query (@?)
/// The table in which documents should be counted (may include schema)
/// The JSON Path expression to be matched
/// The count of the documents in the table
@@ -158,7 +209,7 @@ module Exists =
let byId tableName docId =
WithProps.Exists.byId tableName docId (fromDataSource ())
- /// Determine if a document exists using JSON field comparisons (->> =, etc.)
+ /// Determine if a document exists using JSON field comparisons (->> =, etc.)
/// The table in which existence should be checked (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -167,7 +218,7 @@ module Exists =
let byFields tableName howMatched fields =
WithProps.Exists.byFields tableName howMatched fields (fromDataSource ())
- /// Determine if a document exists using a JSON containment query (@>)
+ /// Determine if a document exists using a JSON containment query (@>)
/// The table in which existence should be checked (may include schema)
/// The document to match with the containment query
/// True if any matching documents exist, false if not
@@ -175,7 +226,7 @@ module Exists =
let byContains tableName criteria =
WithProps.Exists.byContains tableName criteria (fromDataSource ())
- /// Determine if a document exists using a JSON Path match query (@?)
+ /// Determine if a document exists using a JSON Path match query (@?)
/// The table in which existence should be checked (may include schema)
/// The JSON Path expression to be matched
/// True if any matching documents exist, false if not
@@ -184,7 +235,7 @@ module Exists =
WithProps.Exists.byJsonPath tableName jsonPath (fromDataSource ())
-/// Commands to retrieve documents
+/// Commands to retrieve documents as domain objects
[]
module Find =
@@ -219,7 +270,7 @@ module Find =
/// Retrieve a document by its ID
/// The table from which a document should be retrieved (may include schema)
/// The ID of the document to retrieve
- /// Some with the document if found, None otherwise
+ /// Some with the document if found, None otherwise
[]
let byId<'TKey, 'TDoc> tableName docId =
WithProps.Find.byId<'TKey, 'TDoc> tableName docId (fromDataSource ())
@@ -227,11 +278,11 @@ module Find =
/// Retrieve a document by its ID
/// The table from which a document should be retrieved (may include schema)
/// The ID of the document to retrieve
- /// The document if found, null otherwise
+ /// The document if found, null otherwise
let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey) =
WithProps.Find.ById<'TKey, 'TDoc>(tableName, docId, fromDataSource ())
- /// Retrieve documents matching JSON field comparisons (->> =, etc.)
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.)
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -240,7 +291,7 @@ module Find =
let byFields<'TDoc> tableName howMatched fields =
WithProps.Find.byFields<'TDoc> tableName howMatched fields (fromDataSource ())
- /// Retrieve documents matching JSON field comparisons (->> =, etc.)
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.)
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -249,8 +300,8 @@ module Find =
WithProps.Find.ByFields<'TDoc>(tableName, howMatched, fields, fromDataSource ())
///
- /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
- /// the document
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the
+ /// document
///
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
@@ -262,8 +313,8 @@ module Find =
WithProps.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (fromDataSource ())
///
- /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
- /// the document
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the
+ /// document
///
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
@@ -273,7 +324,7 @@ module Find =
let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) =
WithProps.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, fromDataSource ())
- /// Retrieve documents matching a JSON containment query (@>)
+ /// Retrieve documents matching a JSON containment query (@>)
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// All documents matching the given containment query
@@ -281,7 +332,7 @@ module Find =
let byContains<'TDoc> tableName (criteria: obj) =
WithProps.Find.byContains<'TDoc> tableName criteria (fromDataSource ())
- /// Retrieve documents matching a JSON containment query (@>)
+ /// Retrieve documents matching a JSON containment query (@>)
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// All documents matching the given containment query
@@ -289,8 +340,7 @@ module Find =
WithProps.Find.ByContains<'TDoc>(tableName, criteria, fromDataSource ())
///
- /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
- /// document
+ /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
@@ -301,8 +351,7 @@ module Find =
WithProps.Find.byContainsOrdered<'TDoc> tableName criteria orderFields (fromDataSource ())
///
- /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
- /// document
+ /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
@@ -311,7 +360,7 @@ module Find =
let ByContainsOrdered<'TDoc>(tableName, criteria: obj, orderFields) =
WithProps.Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, fromDataSource ())
- /// Retrieve documents matching a JSON Path match query (@?)
+ /// Retrieve documents matching a JSON Path match query (@?)
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// All documents matching the given JSON Path expression
@@ -319,7 +368,7 @@ module Find =
let byJsonPath<'TDoc> tableName jsonPath =
WithProps.Find.byJsonPath<'TDoc> tableName jsonPath (fromDataSource ())
- /// Retrieve documents matching a JSON Path match query (@?)
+ /// Retrieve documents matching a JSON Path match query (@?)
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// All documents matching the given JSON Path expression
@@ -327,7 +376,7 @@ module Find =
WithProps.Find.ByJsonPath<'TDoc>(tableName, jsonPath, fromDataSource ())
///
- /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
@@ -338,7 +387,7 @@ module Find =
WithProps.Find.byJsonPathOrdered<'TDoc> tableName jsonPath orderFields (fromDataSource ())
///
- /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
@@ -347,132 +396,442 @@ module Find =
let ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields) =
WithProps.Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ())
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// Some with the first document, or None if not found
+ /// Some with the first document, or None if not found
[]
let firstByFields<'TDoc> tableName howMatched fields =
WithProps.Find.firstByFields<'TDoc> tableName howMatched fields (fromDataSource ())
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The first document, or null if not found
+ /// The first document, or null if not found
let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) =
WithProps.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, fromDataSource ())
///
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
/// fields in the document
///
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- ///
- /// Some with the first document ordered by the given fields, or None if not found
- ///
+ /// Some with the first document ordered by the given fields, or None if not found
[]
let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields =
WithProps.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (fromDataSource ())
///
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
/// fields in the document
///
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- /// The first document ordered by the given fields, or null if not found
+ /// The first document ordered by the given fields, or null if not found
let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
tableName, howMatched, queryFields, orderFields) =
WithProps.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, fromDataSource ())
- /// Retrieve the first document matching a JSON containment query (@>)
+ /// Retrieve the first document matching a JSON containment query (@>)
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
- /// Some with the first document, or None if not found
+ /// Some with the first document, or None if not found
[]
let firstByContains<'TDoc> tableName (criteria: obj) =
WithProps.Find.firstByContains<'TDoc> tableName criteria (fromDataSource ())
- /// Retrieve the first document matching a JSON containment query (@>)
+ /// Retrieve the first document matching a JSON containment query (@>)
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
- /// The first document, or null if not found
+ /// The first document, or null if not found
let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj) =
WithProps.Find.FirstByContains<'TDoc>(tableName, criteria, fromDataSource ())
///
- /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in
- /// the document
+ /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
///
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- ///
- /// Some with the first document ordered by the given fields, or None if not found
- ///
+ /// Some with the first document ordered by the given fields, or None if not found
[]
let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields =
WithProps.Find.firstByContainsOrdered<'TDoc> tableName criteria orderFields (fromDataSource ())
///
- /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in
- /// the document
+ /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
///
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- /// The first document ordered by the given fields, or null if not found
+ /// The first document ordered by the given fields, or null if not found
let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj, orderFields) =
WithProps.Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, fromDataSource ())
- /// Retrieve the first document matching a JSON Path match query (@?)
+ /// Retrieve the first document matching a JSON Path match query (@?)
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
- /// Some with the first document, or None if not found
+ /// Some with the first document, or None if not found
[]
let firstByJsonPath<'TDoc> tableName jsonPath =
WithProps.Find.firstByJsonPath<'TDoc> tableName jsonPath (fromDataSource ())
- /// Retrieve the first document matching a JSON Path match query (@?)
+ /// Retrieve the first document matching a JSON Path match query (@?)
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
- /// The first document, or null if not found
+ /// The first document, or null if not found
let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath) =
WithProps.Find.FirstByJsonPath<'TDoc>(tableName, jsonPath, fromDataSource ())
///
- /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
/// document
///
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- ///
- /// Some with the first document ordered by the given fields, or None if not found
- ///
+ /// Some with the first document ordered by the given fields, or None if not found
[]
let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields =
WithProps.Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (fromDataSource ())
///
- /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
/// document
///
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- /// The first document ordered by the given fields, or null if not found
+ /// The first document ordered by the given fields, or null if not found
let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, orderFields) =
WithProps.Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ())
+/// Commands to retrieve documents as JSON
+[]
+module Json =
+
+ /// Retrieve all documents in the given table as a JSON array
+ /// The table from which documents should be retrieved (may include schema)
+ /// All documents from the given table as a JSON array
+ []
+ let all tableName =
+ WithProps.Json.all tableName (fromDataSource ())
+
+ /// Write all documents in the given table to the given PipeWriter
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ []
+ let writeAll tableName writer =
+ WithProps.Json.writeAll tableName writer (fromDataSource ())
+
+ ///
+ /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// Fields by which the results should be ordered
+ /// All documents from the given table as a JSON array, ordered by the given fields
+ []
+ let allOrdered tableName orderFields =
+ WithProps.Json.allOrdered tableName orderFields (fromDataSource ())
+
+ ///
+ /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Fields by which the results should be ordered
+ []
+ let writeAllOrdered tableName writer orderFields =
+ WithProps.Json.writeAllOrdered tableName writer orderFields (fromDataSource ())
+
+ /// Retrieve a JSON document by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The ID of the document to retrieve
+ /// The JSON document if found, an empty JSON document otherwise
+ []
+ let byId<'TKey> tableName (docId: 'TKey) =
+ WithProps.Json.byId tableName docId (fromDataSource ())
+
+ /// Write a JSON document to the given PipeWriter by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The ID of the document to retrieve
+ []
+ let writeById<'TKey> tableName writer (docId: 'TKey) =
+ WithProps.Json.writeById tableName writer docId (fromDataSource ())
+
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.)
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// All JSON documents matching the given fields
+ []
+ let byFields tableName howMatched fields =
+ WithProps.Json.byFields tableName howMatched fields (fromDataSource ())
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ []
+ let writeByFields tableName writer howMatched fields =
+ WithProps.Json.writeByFields tableName writer howMatched fields (fromDataSource ())
+
+ ///
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields
+ /// in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given fields, ordered by the other given fields
+ []
+ let byFieldsOrdered tableName howMatched queryFields orderFields =
+ WithProps.Json.byFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ())
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
+ /// ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ []
+ let writeByFieldsOrdered tableName writer howMatched queryFields orderFields =
+ WithProps.Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (fromDataSource ())
+
+ /// Retrieve JSON documents matching a JSON containment query (@>)
+ /// The table from which documents should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// All JSON documents matching the given containment query
+ []
+ let byContains tableName (criteria: obj) =
+ WithProps.Json.byContains tableName criteria (fromDataSource ())
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ []
+ let writeByContains tableName writer (criteria: obj) =
+ WithProps.Json.writeByContains tableName writer criteria (fromDataSource ())
+
+ ///
+ /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given containment query, ordered by the given fields
+ []
+ let byContainsOrdered tableName (criteria: obj) orderFields =
+ WithProps.Json.byContainsOrdered tableName criteria orderFields (fromDataSource ())
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by
+ /// the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ []
+ let writeByContainsOrdered tableName writer (criteria: obj) orderFields =
+ WithProps.Json.writeByContainsOrdered tableName writer criteria orderFields (fromDataSource ())
+
+ /// Retrieve JSON documents matching a JSON Path match query (@?)
+ /// The table from which documents should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// All JSON documents matching the given JSON Path expression
+ []
+ let byJsonPath tableName jsonPath =
+ WithProps.Json.byJsonPath tableName jsonPath (fromDataSource ())
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ []
+ let writeByJsonPath tableName writer jsonPath =
+ WithProps.Json.writeByJsonPath tableName writer jsonPath (fromDataSource ())
+
+ ///
+ /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// All JSON documents matching the given JSON Path expression, ordered by the given fields
+ []
+ let byJsonPathOrdered tableName jsonPath orderFields =
+ WithProps.Json.byJsonPathOrdered tableName jsonPath orderFields (fromDataSource ())
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by the
+ /// given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ []
+ let writeByJsonPathOrdered tableName writer jsonPath orderFields =
+ WithProps.Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (fromDataSource ())
+
+ /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.)
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByFields tableName howMatched fields =
+ WithProps.Json.firstByFields tableName howMatched fields (fromDataSource ())
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
+ /// (->> =, etc.)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ []
+ let writeFirstByFields tableName writer howMatched fields =
+ WithProps.Json.writeFirstByFields tableName writer howMatched fields (fromDataSource ())
+
+ ///
+ /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByFieldsOrdered tableName howMatched queryFields orderFields =
+ WithProps.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ())
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
+ /// (->> =, etc.) ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ []
+ let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields =
+ WithProps.Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields (fromDataSource ())
+
+ /// Retrieve the first JSON document matching a JSON containment query (@>)
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByContains tableName (criteria: obj) =
+ WithProps.Json.firstByContains tableName criteria (fromDataSource ())
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ []
+ let writeFirstByContains tableName writer (criteria: obj) =
+ WithProps.Json.writeFirstByContains tableName writer criteria (fromDataSource ())
+
+ ///
+ /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in
+ /// the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByContainsOrdered tableName (criteria: obj) orderFields =
+ WithProps.Json.firstByContainsOrdered tableName criteria orderFields (fromDataSource ())
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
+ /// ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ []
+ let writeFirstByContainsOrdered tableName writer (criteria: obj) orderFields =
+ WithProps.Json.writeFirstByContainsOrdered tableName writer criteria orderFields (fromDataSource ())
+
+ /// Retrieve the first JSON document matching a JSON Path match query (@?)
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByJsonPath tableName jsonPath =
+ WithProps.Json.firstByJsonPath tableName jsonPath (fromDataSource ())
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ []
+ let writeFirstByJsonPath tableName writer jsonPath =
+ WithProps.Json.writeFirstByJsonPath tableName writer jsonPath (fromDataSource ())
+
+ ///
+ /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByJsonPathOrdered tableName jsonPath orderFields =
+ WithProps.Json.firstByJsonPathOrdered tableName jsonPath orderFields (fromDataSource ())
+
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ /// ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ []
+ let writeFirstByJsonPathOrdered tableName writer jsonPath orderFields =
+ WithProps.Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (fromDataSource ())
+
+
/// Commands to update documents
[]
module Update =
@@ -518,7 +877,7 @@ module Patch =
WithProps.Patch.byId tableName docId patch (fromDataSource ())
///
- /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.)
+ /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.)
///
/// The table in which documents should be patched (may include schema)
/// Whether to match any or all of the field conditions
@@ -528,7 +887,7 @@ module Patch =
let byFields tableName howMatched fields (patch: 'TPatch) =
WithProps.Patch.byFields tableName howMatched fields patch (fromDataSource ())
- /// Patch documents using a JSON containment query in the WHERE clause (@>)
+ /// Patch documents using a JSON containment query in the WHERE clause (@>)
/// The table in which documents should be patched (may include schema)
/// The document to match the containment query
/// The partial document to patch the existing document
@@ -536,7 +895,7 @@ module Patch =
let byContains tableName (criteria: 'TCriteria) (patch: 'TPatch) =
WithProps.Patch.byContains tableName criteria patch (fromDataSource ())
- /// Patch documents using a JSON Path match query in the WHERE clause (@?)
+ /// Patch documents using a JSON Path match query in the WHERE clause (@?)
/// The table in which documents should be patched (may include schema)
/// The JSON Path expression to match
/// The partial document to patch the existing document
@@ -566,7 +925,7 @@ module RemoveFields =
let byFields tableName howMatched fields fieldNames =
WithProps.RemoveFields.byFields tableName howMatched fields fieldNames (fromDataSource ())
- /// Remove fields from documents via a JSON containment query (@>)
+ /// Remove fields from documents via a JSON containment query (@>)
/// The table in which documents should be modified (may include schema)
/// The document to match the containment query
/// One or more field names to remove from the matching documents
@@ -574,7 +933,7 @@ module RemoveFields =
let byContains tableName (criteria: 'TContains) fieldNames =
WithProps.RemoveFields.byContains tableName criteria fieldNames (fromDataSource ())
- /// Remove fields from documents via a JSON Path match query (@?)
+ /// Remove fields from documents via a JSON Path match query (@?)
/// The table in which documents should be modified (may include schema)
/// The JSON Path expression to match
/// One or more field names to remove from the matching documents
@@ -594,7 +953,7 @@ module Delete =
let byId tableName (docId: 'TKey) =
WithProps.Delete.byId tableName docId (fromDataSource ())
- /// Delete documents by matching a JSON field comparison query (->> =, etc.)
+ /// Delete documents by matching a JSON field comparison query (->> =, etc.)
/// The table in which documents should be deleted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
@@ -602,7 +961,7 @@ module Delete =
let byFields tableName howMatched fields =
WithProps.Delete.byFields tableName howMatched fields (fromDataSource ())
- /// Delete documents by matching a JSON contains query (@>)
+ /// Delete documents by matching a JSON contains query (@>)
/// The table in which documents should be deleted (may include schema)
/// The document to match the containment query
[]
diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs
index edf03af..230c7f2 100644
--- a/src/Postgres/Library.fs
+++ b/src/Postgres/Library.fs
@@ -4,11 +4,11 @@
[]
type DocumentIndex =
- /// A GIN index with standard operations (all operators supported)
+ /// A GIN index with standard operations (all operators supported)
| Full
///
- /// A GIN index with JSONPath operations (optimized for @>, @?, @@ operators)
+ /// A GIN index with JSON Path operations (optimized for @>, @?, @@ operators)
///
| Optimized
@@ -94,7 +94,7 @@ module Parameters =
name, Sql.jsonb (Configuration.serializer().Serialize it)
/// Create JSON field parameters
- /// The Fields to convert to parameters
+ /// The Fields to convert to parameters
/// The current parameters for the query
/// A unified sequence of parameter names and values
[]
@@ -129,7 +129,7 @@ module Parameters =
/// Append JSON field name parameters for the given field names to the given parameters
/// The names of fields to be addressed
- /// The name (@name) and parameter value for the field names
+ /// The name (@name) and parameter value for the field names
[]
let fieldNameParams (fieldNames: string seq) =
if Seq.length fieldNames = 1 then "@name", Sql.string (Seq.head fieldNames)
@@ -145,12 +145,10 @@ module Parameters =
[]
module Query =
- ///
- /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document
- ///
+ /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document
/// How the fields should be matched
/// The fields for the comparisons
- /// A WHERE clause implementing the comparisons for the given fields
+ /// A WHERE clause implementing the comparisons for the given fields
[]
let whereByFields (howMatched: FieldMatch) fields =
let name = ParameterName()
@@ -179,9 +177,9 @@ module Query =
else $"{it.Path PostgreSQL AsSql} {it.Comparison.OpSql} {param}")
|> String.concat $" {howMatched} "
- /// Create a WHERE clause fragment to implement an ID-based query
+ /// Create a WHERE clause fragment to implement an ID-based query
/// The ID of the document
- /// A WHERE clause fragment identifying a document by its ID
+ /// A WHERE clause fragment identifying a document by its ID
[]
let whereById<'TKey> (docId: 'TKey) =
whereByFields Any [ { Field.Equal (Configuration.idField ()) docId with ParameterName = Some "@id" } ]
@@ -206,32 +204,28 @@ module Query =
let tableName = name.Split '.' |> Array.last
$"CREATE INDEX IF NOT EXISTS idx_{tableName}_document ON {name} USING GIN (data{extraOps})"
- ///
- /// Create a WHERE clause fragment to implement a @> (JSON contains) condition
- ///
+ /// Create a WHERE clause fragment to implement a @> (JSON contains) condition
/// The parameter name for the query
- /// A WHERE clause fragment for the contains condition
+ /// A WHERE clause fragment for the contains condition
[]
let whereDataContains paramName =
$"data @> %s{paramName}"
- ///
- /// Create a WHERE clause fragment to implement a @? (JSON Path match) condition
- ///
+ /// Create a WHERE clause fragment to implement a @? (JSON Path match) condition
/// The parameter name for the query
- /// A WHERE clause fragment for the JSON Path match condition
+ /// A WHERE clause fragment for the JSON Path match condition
[]
let whereJsonPathMatches paramName =
$"data @? %s{paramName}::jsonpath"
- /// Create an UPDATE statement to patch documents
+ /// Create an UPDATE statement to patch documents
/// The table to be updated
/// A query to patch documents
[]
let patch tableName =
$"UPDATE %s{tableName} SET data = data || @data"
- /// Create an UPDATE statement to remove fields from documents
+ /// Create an UPDATE statement to remove fields from documents
/// The table to be updated
/// A query to remove fields from documents
[]
@@ -270,6 +264,8 @@ module Query =
Query.statementWhere statement (whereJsonPathMatches "@path")
+open System.Text
+
/// Functions for dealing with results
[]
module Results =
@@ -289,16 +285,67 @@ module Results =
let fromData<'T> row : 'T =
fromDocument "data" row
- /// Extract a count from the column it
+ /// Extract a count from the column it
/// A row reader set to the row with the count to retrieve
/// The count from the row
[]
let toCount (row: RowReader) =
row.int "it"
- /// Extract a true/false value from the column it
+ /// Extract a true/false value from the column it
/// A row reader set to the row with the true/false value to retrieve
/// The true/false value from the row
[]
let toExists (row: RowReader) =
row.bool "it"
+
+ /// Extract a JSON document, specifying the field in which the document is found
+ /// The field name containing the JSON document
+ /// A row reader set to the row with the document to be extracted
+ /// The JSON from the given field (an empty object if the field's value is null)
+ []
+ let jsonFromDocument field (row: RowReader) =
+ row.stringOrNone field |> Option.defaultValue "{}"
+
+ /// Extract a JSON document
+ /// A row reader set to the row with the document to be extracted
+ /// The JSON from the row (an empty object if the data field's value is null)
+ []
+ let jsonFromData row =
+ jsonFromDocument "data" row
+
+ /// Create a JSON array of items for the results of a query
+ /// The mapping function to extract JSON from the query's results
+ /// The query from which JSON should be extracted
+ /// A JSON array as a string; no results will produce an empty array ("[]")
+ []
+ let toJsonArray (mapFunc: RowReader -> string) sqlProps = backgroundTask {
+ let output = StringBuilder("[")
+ do! sqlProps
+ |> Sql.iterAsync (fun it ->
+ if output.Length > 2 then ignore (output.Append ",")
+ mapFunc it |> output.Append |> ignore)
+ return output.Append("]").ToString()
+ }
+
+ /// Create a JSON array of items for the results of a query
+ /// The mapping function to extract JSON from the query's results
+ /// The query from which JSON should be extracted
+ /// A JSON array as a string; no results will produce an empty array ("[]")
+ let ToJsonArray(mapFunc: System.Func, sqlProps) =
+ toJsonArray mapFunc.Invoke sqlProps
+
+ /// Write a JSON array of items for the results of a query to the given PipeWriter
+ /// The PipeWriter to which results should be written
+ /// The mapping function to extract JSON from the query's results
+ /// The query from which JSON should be extracted
+ []
+ let writeJsonArray writer (mapFunc: RowReader -> string) sqlProps =
+ sqlProps |> Sql.toSeq mapFunc |> PipeWriter.writeStrings writer
+
+ /// Write a JSON array of items for the results of a query to the given PipeWriter
+ /// The PipeWriter to which results should be written
+ /// The mapping function to extract JSON from the query's results
+ /// The query from which JSON should be extracted
+ let WriteJsonArray(writer, mapFunc: System.Func, sqlProps) =
+ writeJsonArray writer mapFunc.Invoke sqlProps
diff --git a/src/Postgres/README.md b/src/Postgres/README.md
index ff442c9..d0b6cf2 100644
--- a/src/Postgres/README.md
+++ b/src/Postgres/README.md
@@ -13,7 +13,7 @@ This package provides a lightweight document library backed by [PostgreSQL](http
## Upgrading from v3
-There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://bitbadger.solutions/open-source/relational-documents/upgrade-from-v3-to-v4.html) for full details.
+There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://relationaldocs.bitbadger.solutions/dotnet/upgrade/v4.html) for full details.
## Getting Started
@@ -71,7 +71,7 @@ var customer = await Find.ById("customer", "123");
// Find.byId type signature is string -> 'TKey -> Task<'TDoc option>
let! customer = Find.byId "customer" "123"
```
-_(keys are treated as strings or numbers depending on their defintion; however, they are indexed as strings)_
+_(keys are treated as strings or numbers depending on their definition; however, they are indexed as strings)_
Count customers in Atlanta (using JSON containment):
@@ -103,4 +103,4 @@ do! Delete.byJsonPath "customer" """$.City ? (@ == "Chicago")"""
## More Information
-The [project site](https://bitbadger.solutions/open-source/relational-documents/) has full details on how to use this library.
+The [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has full details on how to use this library.
diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs
index da7bc86..16901ff 100644
--- a/src/Postgres/WithProps.fs
+++ b/src/Postgres/WithProps.fs
@@ -1,4 +1,4 @@
-/// Versions of queries that accept SqlProps as the last parameter
+/// Versions of queries that accept SqlProps as the last parameter
module BitBadger.Documents.Postgres.WithProps
open BitBadger.Documents
@@ -14,7 +14,7 @@ module Custom =
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// A list of results for the given query
[]
let list<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) sqlProps =
@@ -26,22 +26,64 @@ module Custom =
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// A list of results for the given query
let List<'TDoc>(query, parameters, mapFunc: System.Func, sqlProps) = backgroundTask {
let! results = list<'TDoc> query parameters mapFunc.Invoke sqlProps
return ResizeArray results
}
+ /// Execute a query that returns a JSON array of results
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The SqlProps to use to execute the query
+ /// A JSON array of results for the given query
+ []
+ let jsonArray query parameters (mapFunc: RowReader -> string) sqlProps =
+ Sql.query query sqlProps
+ |> Sql.parameters (FSharpList.ofSeq parameters)
+ |> toJsonArray mapFunc
+
+ /// Execute a query that returns a JSON array of results
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The SqlProps to use to execute the query
+ /// A JSON array of results for the given query
+ let JsonArray(query, parameters, mapFunc: System.Func, sqlProps) =
+ jsonArray query parameters mapFunc.Invoke sqlProps
+
+ /// Execute a query, writing its results to the given PipeWriter
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The PipeWriter to which the results should be written
+ /// The mapping function to extract the document
+ /// The SqlProps to use to execute the query
+ []
+ let writeJsonArray query parameters writer (mapFunc: RowReader -> string) sqlProps =
+ Sql.query query sqlProps
+ |> Sql.parameters (FSharpList.ofSeq parameters)
+ |> writeJsonArray writer mapFunc
+
+ /// Execute a query, writing its results to the given PipeWriter
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The PipeWriter to which the results should be written
+ /// The mapping function to extract the document
+ /// The SqlProps to use to execute the query
+ let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, sqlProps) =
+ writeJsonArray query parameters writer mapFunc.Invoke sqlProps
+
/// Execute a query that returns one or no results
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// The SqlProps to use to execute the query
- /// Some with the first matching result, or None if not found
+ /// The SqlProps to use to execute the query
+ /// Some with the first matching result, or None if not found
[]
let single<'TDoc> query parameters mapFunc sqlProps = backgroundTask {
- let! results = list<'TDoc> query parameters mapFunc sqlProps
+ let! results = list<'TDoc> $"{query} LIMIT 1" parameters mapFunc sqlProps
return FSharpList.tryHead results
}
@@ -49,18 +91,39 @@ module Custom =
/// The query to retrieve the results
/// Parameters to use for the query
/// The mapping function between the document and the domain item
- /// The SqlProps to use to execute the query
- /// The first matching result, or null if not found
+ /// The SqlProps to use to execute the query
+ /// The first matching result, or null if not found
let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>(
query, parameters, mapFunc: System.Func, sqlProps) = backgroundTask {
let! result = single<'TDoc> query parameters mapFunc.Invoke sqlProps
return Option.toObj result
}
+ /// Execute a query that returns one or no JSON documents
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The SqlProps to use to execute the query
+ /// The JSON document with the first matching result, or an empty document if not found
+ []
+ let jsonSingle query parameters mapFunc sqlProps = backgroundTask {
+ let! results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc sqlProps
+ return if results = "[]" then "{}" else results[1..results.Length - 2]
+ }
+
+ /// Execute a query that returns one or no JSON documents
+ /// The query to retrieve the results
+ /// Parameters to use for the query
+ /// The mapping function to extract the document
+ /// The SqlProps to use to execute the query
+ /// The JSON document with the first matching result, or an empty document if not found
+ let JsonSingle(query, parameters, mapFunc: System.Func, sqlProps) =
+ jsonSingle query parameters mapFunc.Invoke sqlProps
+
/// Execute a query that returns no results
/// The query to retrieve the results
/// Parameters to use for the query
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let nonQuery query parameters sqlProps =
Sql.query query sqlProps
@@ -72,7 +135,7 @@ module Custom =
/// The query to retrieve the value
/// Parameters to use for the query
/// The mapping function to obtain the value
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// The scalar value for the query
[]
let scalar<'T when 'T: struct> query parameters (mapFunc: RowReader -> 'T) sqlProps =
@@ -84,7 +147,7 @@ module Custom =
/// The query to retrieve the value
/// Parameters to use for the query
/// The mapping function to obtain the value
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// The scalar value for the query
let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, sqlProps) =
scalar<'T> query parameters mapFunc.Invoke sqlProps
@@ -94,7 +157,7 @@ module Definition =
/// Create a document table
/// The table whose existence should be ensured (may include schema)
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let ensureTable name sqlProps = backgroundTask {
do! Custom.nonQuery (Query.Definition.ensureTable name) [] sqlProps
@@ -104,7 +167,7 @@ module Definition =
/// Create an index on documents in the specified table
/// The table to be indexed (may include schema)
/// The type of document index to create
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let ensureDocumentIndex name idxType sqlProps =
Custom.nonQuery (Query.Definition.ensureDocumentIndex name idxType) [] sqlProps
@@ -113,7 +176,7 @@ module Definition =
/// The table to be indexed (may include schema)
/// The name of the index to create
/// One or more fields to be indexed
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let ensureFieldIndex tableName indexName fields sqlProps =
Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields PostgreSQL) [] sqlProps
@@ -125,7 +188,7 @@ module Document =
/// Insert a new document
/// The table into which the document should be inserted (may include schema)
/// The document to be inserted
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let insert<'TDoc> tableName (document: 'TDoc) sqlProps =
let query =
@@ -149,7 +212,7 @@ module Document =
/// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert")
/// The table into which the document should be saved (may include schema)
/// The document to be saved
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let save<'TDoc> tableName (document: 'TDoc) sqlProps =
Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] sqlProps
@@ -160,37 +223,37 @@ module Count =
/// Count all documents in a table
/// The table in which documents should be counted (may include schema)
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// The count of the documents in the table
[]
let all tableName sqlProps =
Custom.scalar (Query.count tableName) [] toCount sqlProps
- /// Count matching documents using JSON field comparisons (->> =, etc.)
+ /// Count matching documents using JSON field comparisons (->> =, etc.)
/// The table in which documents should be counted (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// The count of matching documents in the table
[]
let byFields tableName howMatched fields sqlProps =
Custom.scalar
(Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount sqlProps
- /// Count matching documents using a JSON containment query (@>)
+ /// Count matching documents using a JSON containment query (@>)
/// The table in which documents should be counted (may include schema)
/// The document to match with the containment query
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// The count of the documents in the table
[]
let byContains tableName (criteria: 'TContains) sqlProps =
Custom.scalar
(Query.byContains (Query.count tableName)) [ jsonParam "@criteria" criteria ] toCount sqlProps
- /// Count matching documents using a JSON Path match query (@?)
+ /// Count matching documents using a JSON Path match query (@?)
/// The table in which documents should be counted (may include schema)
/// The JSON Path expression to be matched
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// The count of the documents in the table
[]
let byJsonPath tableName jsonPath sqlProps =
@@ -204,17 +267,17 @@ module Exists =
/// Determine if a document exists for the given ID
/// The table in which existence should be checked (may include schema)
/// The ID of the document whose existence should be checked
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// True if a document exists, false if not
[]
let byId tableName (docId: 'TKey) sqlProps =
Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists sqlProps
- /// Determine if a document exists using JSON field comparisons (->> =, etc.)
+ /// Determine if a document exists using JSON field comparisons (->> =, etc.)
/// The table in which existence should be checked (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// True if any matching documents exist, false if not
[]
let byFields tableName howMatched fields sqlProps =
@@ -224,10 +287,10 @@ module Exists =
toExists
sqlProps
- /// Determine if a document exists using a JSON containment query (@>)
+ /// Determine if a document exists using a JSON containment query (@>)
/// The table in which existence should be checked (may include schema)
/// The document to match with the containment query
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// True if any matching documents exist, false if not
[]
let byContains tableName (criteria: 'TContains) sqlProps =
@@ -237,10 +300,10 @@ module Exists =
toExists
sqlProps
- /// Determine if a document exists using a JSON Path match query (@?)
+ /// Determine if a document exists using a JSON Path match query (@?)
/// The table in which existence should be checked (may include schema)
/// The JSON Path expression to be matched
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// True if any matching documents exist, false if not
[]
let byJsonPath tableName jsonPath sqlProps =
@@ -250,13 +313,13 @@ module Exists =
toExists
sqlProps
-/// Commands to retrieve documents
+/// Commands to retrieve documents as domain objects
[]
module Find =
/// Retrieve all documents in the given table
/// The table from which documents should be retrieved (may include schema)
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents from the given table
[]
let all<'TDoc> tableName sqlProps =
@@ -264,7 +327,7 @@ module Find =
/// Retrieve all documents in the given table
/// The table from which documents should be retrieved (may include schema)
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents from the given table
let All<'TDoc>(tableName, sqlProps) =
Custom.List<'TDoc>(Query.find tableName, [], fromData<'TDoc>, sqlProps)
@@ -272,7 +335,7 @@ module Find =
/// Retrieve all documents in the given table ordered by the given fields in the document
/// The table from which documents should be retrieved (may include schema)
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents from the given table, ordered by the given fields
[]
let allOrdered<'TDoc> tableName orderFields sqlProps =
@@ -281,7 +344,7 @@ module Find =
/// Retrieve all documents in the given table ordered by the given fields in the document
/// The table from which documents should be retrieved (may include schema)
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents from the given table, ordered by the given fields
let AllOrdered<'TDoc>(tableName, orderFields, sqlProps) =
Custom.List<'TDoc>(
@@ -290,8 +353,8 @@ module Find =
/// Retrieve a document by its ID
/// The table from which a document should be retrieved (may include schema)
/// The ID of the document to retrieve
- /// The SqlProps to use to execute the query
- /// Some with the document if found, None otherwise
+ /// The SqlProps to use to execute the query
+ /// Some with the document if found, None otherwise
[]
let byId<'TKey, 'TDoc> tableName (docId: 'TKey) sqlProps =
Custom.single (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> sqlProps
@@ -299,17 +362,17 @@ module Find =
/// Retrieve a document by its ID
/// The table from which a document should be retrieved (may include schema)
/// The ID of the document to retrieve
- /// The SqlProps to use to execute the query
- /// The document if found, null otherwise
+ /// The SqlProps to use to execute the query
+ /// The document if found, null otherwise
let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, sqlProps) =
Custom.Single<'TDoc>(
Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, sqlProps)
- /// Retrieve documents matching JSON field comparisons (->> =, etc.)
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.)
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given fields
[]
let byFields<'TDoc> tableName howMatched fields sqlProps =
@@ -319,11 +382,11 @@ module Find =
fromData<'TDoc>
sqlProps
- /// Retrieve documents matching JSON field comparisons (->> =, etc.)
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.)
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given fields
let ByFields<'TDoc>(tableName, howMatched, fields, sqlProps) =
Custom.List<'TDoc>(
@@ -333,14 +396,14 @@ module Find =
sqlProps)
///
- /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
/// the document
///
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given fields, ordered by the other given fields
[]
let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps =
@@ -351,14 +414,14 @@ module Find =
sqlProps
///
- /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in
- /// the document
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the
+ /// document
///
/// The table from which documents should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given fields, ordered by the other given fields
let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, sqlProps) =
Custom.List<'TDoc>(
@@ -367,20 +430,20 @@ module Find =
fromData<'TDoc>,
sqlProps)
- /// Retrieve documents matching a JSON containment query (@>)
+ /// Retrieve documents matching a JSON containment query (@>)
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given containment query
[]
let byContains<'TDoc> tableName (criteria: obj) sqlProps =
Custom.list<'TDoc>
(Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps
- /// Retrieve documents matching a JSON containment query (@>)
+ /// Retrieve documents matching a JSON containment query (@>)
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given containment query
let ByContains<'TDoc>(tableName, criteria: obj, sqlProps) =
Custom.List<'TDoc>(
@@ -390,13 +453,12 @@ module Find =
sqlProps)
///
- /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
- /// document
+ /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given containment query, ordered by the given fields
[]
let byContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps =
@@ -407,13 +469,12 @@ module Find =
sqlProps
///
- /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the
- /// document
+ /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given containment query, ordered by the given fields
let ByContainsOrdered<'TDoc>(tableName, criteria: obj, orderFields, sqlProps) =
Custom.List<'TDoc>(
@@ -422,20 +483,20 @@ module Find =
fromData<'TDoc>,
sqlProps)
- /// Retrieve documents matching a JSON Path match query (@?)
+ /// Retrieve documents matching a JSON Path match query (@?)
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given JSON Path expression
[]
let byJsonPath<'TDoc> tableName jsonPath sqlProps =
Custom.list<'TDoc>
(Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] fromData<'TDoc> sqlProps
- /// Retrieve documents matching a JSON Path match query (@?)
+ /// Retrieve documents matching a JSON Path match query (@?)
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given JSON Path expression
let ByJsonPath<'TDoc>(tableName, jsonPath, sqlProps) =
Custom.List<'TDoc>(
@@ -445,12 +506,12 @@ module Find =
sqlProps)
///
- /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given JSON Path expression, ordered by the given fields
[]
let byJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps =
@@ -461,12 +522,12 @@ module Find =
sqlProps
///
- /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
+ /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document
///
/// The table from which documents should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
/// All documents matching the given JSON Path expression, ordered by the given fields
let ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, sqlProps) =
Custom.List<'TDoc>(
@@ -475,192 +536,599 @@ module Find =
fromData<'TDoc>,
sqlProps)
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The SqlProps to use to execute the query
- /// Some with the first document, or None if not found
+ /// The SqlProps to use to execute the query
+ /// Some with the first document, or None if not found
[]
let firstByFields<'TDoc> tableName howMatched fields sqlProps =
Custom.single<'TDoc>
- $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1"
+ (Query.byFields (Query.find tableName) howMatched fields)
(addFieldParams fields [])
fromData<'TDoc>
sqlProps
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.)
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
- /// The SqlProps to use to execute the query
- /// The first document, or null if not found
+ /// The SqlProps to use to execute the query
+ /// The first document, or null if not found
let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, sqlProps) =
Custom.Single<'TDoc>(
- $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1",
+ Query.byFields (Query.find tableName) howMatched fields,
addFieldParams fields [],
fromData<'TDoc>,
sqlProps)
///
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
/// fields in the document
///
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
- ///
- /// Some with the first document ordered by the given fields, or None if not found
- ///
+ /// The SqlProps to use to execute the query
+ /// Some with the first document ordered by the given fields, or None if not found
[]
let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps =
Custom.single<'TDoc>
- $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL} LIMIT 1"
+ (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL)
(addFieldParams queryFields [])
fromData<'TDoc>
sqlProps
///
- /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given
/// fields in the document
///
/// The table from which a document should be retrieved (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
- /// The first document ordered by the given fields, or null if not found
+ /// The SqlProps to use to execute the query
+ /// The first document ordered by the given fields, or null if not found
let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
tableName, howMatched, queryFields, orderFields, sqlProps) =
Custom.Single<'TDoc>(
- $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL} LIMIT 1",
+ Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL,
addFieldParams queryFields [],
fromData<'TDoc>,
sqlProps)
- /// Retrieve the first document matching a JSON containment query (@>)
+ /// Retrieve the first document matching a JSON containment query (@>)
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
- /// The SqlProps to use to execute the query
- /// Some with the first document, or None if not found
+ /// The SqlProps to use to execute the query
+ /// Some with the first document, or None if not found
[]
let firstByContains<'TDoc> tableName (criteria: obj) sqlProps =
Custom.single<'TDoc>
- $"{Query.byContains (Query.find tableName)} LIMIT 1"
- [ jsonParam "@criteria" criteria ]
- fromData<'TDoc>
- sqlProps
+ (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps
- /// Retrieve the first document matching a JSON containment query (@>)
+ /// Retrieve the first document matching a JSON containment query (@>)
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
- /// The SqlProps to use to execute the query
- /// The first document, or null if not found
+ /// The SqlProps to use to execute the query
+ /// The first document, or null if not found
let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj, sqlProps) =
Custom.Single<'TDoc>(
- $"{Query.byContains (Query.find tableName)} LIMIT 1",
- [ jsonParam "@criteria" criteria ],
- fromData<'TDoc>,
- sqlProps)
+ Query.byContains (Query.find tableName), [ jsonParam "@criteria" criteria ], fromData<'TDoc>, sqlProps)
///
- /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in
- /// the document
+ /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
///
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
- ///
- /// Some with the first document ordered by the given fields, or None if not found
- ///
+ /// The SqlProps to use to execute the query
+ /// Some with the first document ordered by the given fields, or None if not found
[]
let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps =
Custom.single<'TDoc>
- $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1"
+ (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
[ jsonParam "@criteria" criteria ]
fromData<'TDoc>
sqlProps
///
- /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in
+ /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// The SqlProps to use to execute the query
+ /// The first document ordered by the given fields, or null if not found
+ let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
+ tableName, criteria: obj, orderFields, sqlProps) =
+ Custom.Single<'TDoc>(
+ Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL,
+ [ jsonParam "@criteria" criteria ],
+ fromData<'TDoc>,
+ sqlProps)
+
+ /// Retrieve the first document matching a JSON Path match query (@?)
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// The SqlProps to use to execute the query
+ /// Some with the first document, or None if not found
+ []
+ let firstByJsonPath<'TDoc> tableName jsonPath sqlProps =
+ Custom.single<'TDoc>
+ (Query.byPathMatch (Query.find tableName))
+ [ "@path", Sql.string jsonPath ]
+ fromData<'TDoc>
+ sqlProps
+
+ /// Retrieve the first document matching a JSON Path match query (@?)
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// The SqlProps to use to execute the query
+ /// The first document, or null if not found
+ let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, sqlProps) =
+ Custom.Single<'TDoc>(
+ Query.byPathMatch (Query.find tableName),
+ [ "@path", Sql.string jsonPath ],
+ fromData<'TDoc>,
+ sqlProps)
+
+ ///
+ /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// The SqlProps to use to execute the query
+ /// Some with the first document ordered by the given fields, or None if not found
+ []
+ let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps =
+ Custom.single<'TDoc>
+ (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
+ [ "@path", Sql.string jsonPath ]
+ fromData<'TDoc>
+ sqlProps
+
+ ///
+ /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The JSON Path expression to match
+ /// Fields by which the results should be ordered
+ /// The SqlProps to use to execute the query
+ /// The first document ordered by the given fields, or null if not found
+ let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
+ tableName, jsonPath, orderFields, sqlProps) =
+ Custom.Single<'TDoc>(
+ Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL,
+ [ "@path", Sql.string jsonPath ],
+ fromData<'TDoc>,
+ sqlProps)
+
+/// Commands to retrieve documents as JSON
+[]
+module Json =
+
+ /// Retrieve all documents in the given table as a JSON array
+ /// The table from which documents should be retrieved (may include schema)
+ /// The SqlProps to use to execute the query
+ /// All documents from the given table as a JSON array
+ []
+ let all tableName sqlProps =
+ Custom.jsonArray (Query.find tableName) [] jsonFromData sqlProps
+
+ /// Write all documents in the given table to the given PipeWriter
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The SqlProps to use to execute the query
+ []
+ let writeAll tableName writer sqlProps =
+ Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData sqlProps
+
+ ///
+ /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// Fields by which the results should be ordered
+ /// The SqlProps to use to execute the query
+ /// All documents from the given table as a JSON array, ordered by the given fields
+ []
+ let allOrdered tableName orderFields sqlProps =
+ Custom.jsonArray (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] jsonFromData sqlProps
+
+ ///
+ /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the
+ /// document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Fields by which the results should be ordered
+ /// The SqlProps to use to execute the query
+ []
+ let writeAllOrdered tableName writer orderFields sqlProps =
+ Custom.writeJsonArray
+ (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] writer jsonFromData sqlProps
+
+ /// Retrieve a JSON document by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The ID of the document to retrieve
+ /// The SqlProps to use to execute the query
+ /// The JSON document if found, an empty JSON document otherwise
+ []
+ let byId<'TKey> tableName (docId: 'TKey) sqlProps =
+ Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData sqlProps
+
+ /// Write a JSON document to the given PipeWriter by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The ID of the document to retrieve
+ /// The SqlProps to use to execute the query
+ []
+ let writeById<'TKey> tableName writer (docId: 'TKey) sqlProps = backgroundTask {
+ let! json = byId tableName docId sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
+
+ /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.)
+ /// The table from which documents should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// The SqlProps to use to execute the query
+ /// All JSON documents matching the given fields
+ []
+ let byFields tableName howMatched fields sqlProps =
+ Custom.jsonArray
+ (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps
+
+ ///
+ /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// The SqlProps to use to execute the query
+ []
+ let writeByFields tableName writer howMatched fields sqlProps =
+ Custom.writeJsonArray
+ (Query.byFields (Query.find tableName) howMatched fields)
+ (addFieldParams fields [])
+ writer
+ jsonFromData
+ sqlProps
+
+ /// <summary>
+ /// Retrieve JSON documents matching JSON field comparisons (<c>->> =</c>, etc.) ordered by the given fields
+ /// in the document
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="queryFields">The field conditions to match</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>All JSON documents matching the given fields, ordered by the other given fields</returns>
+ [<CompiledName "ByFieldsOrdered">]
+ let byFieldsOrdered tableName howMatched queryFields orderFields sqlProps =
+ Custom.jsonArray
+ (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL)
+ (addFieldParams queryFields [])
+ jsonFromData
+ sqlProps
+
+ /// <summary>
+ /// Write JSON documents to the given <c>PipeWriter</c> matching JSON field comparisons (<c>->> =</c>, etc.)
+ /// ordered by the given fields in the document
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="queryFields">The field conditions to match</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteByFieldsOrdered">]
+ let writeByFieldsOrdered tableName writer howMatched queryFields orderFields sqlProps =
+ Custom.writeJsonArray
+ (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL)
+ (addFieldParams queryFields [])
+ writer
+ jsonFromData
+ sqlProps
+
+ /// <summary>Retrieve JSON documents matching a JSON containment query (<c>@></c>)</summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="criteria">The document to match with the containment query</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>All JSON documents matching the given containment query</returns>
+ [<CompiledName "ByContains">]
+ let byContains tableName (criteria: obj) sqlProps =
+ Custom.jsonArray
+ (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps
+
+ /// <summary>
+ /// Write JSON documents to the given <c>PipeWriter</c> matching a JSON containment query (<c>@></c>)
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="criteria">The document to match with the containment query</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteByContains">]
+ let writeByContains tableName writer (criteria: obj) sqlProps =
+ Custom.writeJsonArray
+ (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] writer jsonFromData sqlProps
+
+ /// <summary>
+ /// Retrieve JSON documents matching a JSON containment query (<c>@></c>) ordered by the given fields in the
+ /// document
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="criteria">The document to match with the containment query</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>All documents matching the given containment query, ordered by the given fields</returns>
+ [<CompiledName "ByContainsOrdered">]
+ let byContainsOrdered tableName (criteria: obj) orderFields sqlProps =
+ Custom.jsonArray
+ (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
+ [ jsonParam "@criteria" criteria ]
+ jsonFromData
+ sqlProps
+
+ /// <summary>
+ /// Write JSON documents to the given <c>PipeWriter</c> matching a JSON containment query (<c>@></c>) ordered by
+ /// the given fields in the document
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="criteria">The document to match with the containment query</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteByContainsOrdered">]
+ let writeByContainsOrdered tableName writer (criteria: obj) orderFields sqlProps =
+ Custom.writeJsonArray
+ (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
+ [ jsonParam "@criteria" criteria ]
+ writer
+ jsonFromData
+ sqlProps
+
+ /// <summary>Retrieve JSON documents matching a JSON Path match query (<c>@?</c>)</summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="jsonPath">The JSON Path expression to match</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>All JSON documents matching the given JSON Path expression</returns>
+ [<CompiledName "ByJsonPath">]
+ let byJsonPath tableName jsonPath sqlProps =
+ Custom.jsonArray
+ (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps
+
+ /// <summary>
+ /// Write JSON documents to the given <c>PipeWriter</c> matching a JSON Path match query (<c>@?</c>)
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="jsonPath">The JSON Path expression to match</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteByJsonPath">]
+ let writeByJsonPath tableName writer jsonPath sqlProps =
+ Custom.writeJsonArray
+ (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] writer jsonFromData sqlProps
+
+ /// <summary>
+ /// Retrieve JSON documents matching a JSON Path match query (<c>@?</c>) ordered by the given fields in the document
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="jsonPath">The JSON Path expression to match</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>All JSON documents matching the given JSON Path expression, ordered by the given fields</returns>
+ [<CompiledName "ByJsonPathOrdered">]
+ let byJsonPathOrdered tableName jsonPath orderFields sqlProps =
+ Custom.jsonArray
+ (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
+ [ "@path", Sql.string jsonPath ]
+ jsonFromData
+ sqlProps
+
+ /// <summary>
+ /// Write JSON documents to the given <c>PipeWriter</c> matching a JSON Path match query (<c>@?</c>) ordered by the
+ /// given fields in the document
+ /// </summary>
+ /// <param name="tableName">The table from which documents should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="jsonPath">The JSON Path expression to match</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteByJsonPathOrdered">]
+ let writeByJsonPathOrdered tableName writer jsonPath orderFields sqlProps =
+ Custom.writeJsonArray
+ (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
+ [ "@path", Sql.string jsonPath ]
+ writer
+ jsonFromData
+ sqlProps
+
+ /// <summary>Retrieve the first JSON document matching JSON field comparisons (<c>->> =</c>, etc.)</summary>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="fields">The field conditions to match</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>The first matching JSON document if found, an empty JSON document otherwise</returns>
+ [<CompiledName "FirstByFields">]
+ let firstByFields tableName howMatched fields sqlProps =
+ Custom.jsonSingle
+ (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps
+
+ /// <summary>
+ /// Write the first JSON document to the given <c>PipeWriter</c> matching JSON field comparisons (<c>->> =</c>,
+ /// etc.)
+ /// </summary>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="fields">The field conditions to match</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteFirstByFields">]
+ let writeFirstByFields tableName writer howMatched fields sqlProps = backgroundTask {
+ let! json = firstByFields tableName howMatched fields sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
+
+ /// <summary>
+ /// Retrieve the first JSON document matching JSON field comparisons (<c>->> =</c>, etc.) ordered by the given
+ /// fields in the document
+ /// </summary>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="queryFields">The field conditions to match</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>The first matching JSON document if found, an empty JSON document otherwise</returns>
+ [<CompiledName "FirstByFieldsOrdered">]
+ let firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps =
+ Custom.jsonSingle
+ (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL)
+ (addFieldParams queryFields [])
+ jsonFromData
+ sqlProps
+
+ /// <summary>
+ /// Write the first JSON document to the given <c>PipeWriter</c> matching JSON field comparisons
+ /// (<c>->> =</c>, etc.) ordered by the given fields in the document
+ /// </summary>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="howMatched">Whether to match any or all of the field conditions</param>
+ /// <param name="queryFields">The field conditions to match</param>
+ /// <param name="orderFields">Fields by which the results should be ordered</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteFirstByFieldsOrdered">]
+ let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields sqlProps = backgroundTask {
+ let! json = firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
+
+ /// <summary>Retrieve the first JSON document matching a JSON containment query (<c>@></c>)</summary>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="criteria">The document to match with the containment query</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ /// <returns>The first matching JSON document if found, an empty JSON document otherwise</returns>
+ [<CompiledName "FirstByContains">]
+ let firstByContains tableName (criteria: obj) sqlProps =
+ Custom.jsonSingle
+ (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps
+
+ /// <summary>
+ /// Write the first JSON document to the given <c>PipeWriter</c> matching a JSON containment query (<c>@></c>)
+ /// </summary>
+ /// <param name="tableName">The table from which a document should be retrieved (may include schema)</param>
+ /// <param name="writer">The <c>PipeWriter</c> to which the results should be written</param>
+ /// <param name="criteria">The document to match with the containment query</param>
+ /// <param name="sqlProps">The <c>SqlProps</c> to use to execute the query</param>
+ [<CompiledName "WriteFirstByContains">]
+ let writeFirstByContains tableName writer (criteria: obj) sqlProps = backgroundTask {
+ let! json = firstByContains tableName criteria sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
+
+ ///
+ /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in
/// the document
///
/// The table from which a document should be retrieved (may include schema)
/// The document to match with the containment query
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
- /// The first document ordered by the given fields, or null if not found
- let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
- tableName, criteria: obj, orderFields, sqlProps) =
- Custom.Single<'TDoc>(
- $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1",
- [ jsonParam "@criteria" criteria ],
- fromData<'TDoc>,
- sqlProps)
-
- /// Retrieve the first document matching a JSON Path match query (@?)
- /// The table from which a document should be retrieved (may include schema)
- /// The JSON Path expression to match
- /// The SqlProps to use to execute the query
- /// Some with the first document, or None if not found
- []
- let firstByJsonPath<'TDoc> tableName jsonPath sqlProps =
- Custom.single<'TDoc>
- $"{Query.byPathMatch (Query.find tableName)} LIMIT 1"
- [ "@path", Sql.string jsonPath ]
- fromData<'TDoc>
+ /// The SqlProps to use to execute the query
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByContainsOrdered tableName (criteria: obj) orderFields sqlProps =
+ Custom.jsonSingle
+ (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
+ [ jsonParam "@criteria" criteria ]
+ jsonFromData
sqlProps
- /// Retrieve the first document matching a JSON Path match query (@?)
+ ///
+ /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
+ /// ordered by the given fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The document to match with the containment query
+ /// Fields by which the results should be ordered
+ /// The SqlProps to use to execute the query
+ []
+ let writeFirstByContainsOrdered tableName writer (criteria: obj) orderFields sqlProps = backgroundTask {
+ let! json = firstByContainsOrdered tableName criteria orderFields sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
+
+ /// Retrieve the first JSON document matching a JSON Path match query (@?)
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
- /// The SqlProps to use to execute the query
- /// The first document, or null if not found
- let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, sqlProps) =
- Custom.Single<'TDoc>(
- $"{Query.byPathMatch (Query.find tableName)} LIMIT 1",
- [ "@path", Sql.string jsonPath ],
- fromData<'TDoc>,
- sqlProps)
+ /// The SqlProps to use to execute the query
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByJsonPath tableName jsonPath sqlProps =
+ Custom.jsonSingle
+ (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps
///
- /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
+ /// The JSON Path expression to match
+ /// The SqlProps to use to execute the query
+ []
+ let writeFirstByJsonPath tableName writer jsonPath sqlProps = backgroundTask {
+ let! json = firstByJsonPath tableName jsonPath sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
+
+ ///
+ /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the
/// document
///
/// The table from which a document should be retrieved (may include schema)
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
- ///
- /// Some with the first document ordered by the given fields, or None if not found
- ///
- []
- let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps =
- Custom.single<'TDoc>
- $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1"
+ /// The SqlProps to use to execute the query
+ /// The first matching JSON document if found, an empty JSON document otherwise
+ []
+ let firstByJsonPathOrdered tableName jsonPath orderFields sqlProps =
+ Custom.jsonSingle
+ (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL)
[ "@path", Sql.string jsonPath ]
- fromData<'TDoc>
+ jsonFromData
sqlProps
///
- /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the
- /// document
+ /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
+ /// ordered by the given fields in the document
///
/// The table from which a document should be retrieved (may include schema)
+ /// The PipeWriter to which the results should be written
/// The JSON Path expression to match
/// Fields by which the results should be ordered
- /// The SqlProps to use to execute the query
- /// The first document ordered by the given fields, or null if not found
- let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(
- tableName, jsonPath, orderFields, sqlProps) =
- Custom.Single<'TDoc>(
- $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1",
- [ "@path", Sql.string jsonPath ],
- fromData<'TDoc>,
- sqlProps)
+ /// The SqlProps to use to execute the query
+ []
+ let writeFirstByJsonPathOrdered tableName writer jsonPath orderFields sqlProps = backgroundTask {
+ let! json = firstByJsonPathOrdered tableName jsonPath orderFields sqlProps
+ let! _ = PipeWriter.writeString writer json
+ ()
+ }
/// Commands to update documents
[]
@@ -670,7 +1138,7 @@ module Update =
/// The table in which a document should be updated (may include schema)
/// The ID of the document to be updated (replaced)
/// The new document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byId tableName (docId: 'TKey) (document: 'TDoc) sqlProps =
Custom.nonQuery
@@ -682,7 +1150,7 @@ module Update =
/// The table in which a document should be updated (may include schema)
/// The function to obtain the ID of the document
/// The new document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) sqlProps =
byId tableName (idFunc document) document sqlProps
@@ -693,7 +1161,7 @@ module Update =
/// The table in which a document should be updated (may include schema)
/// The function to obtain the ID of the document
/// The new document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, sqlProps) =
byFunc tableName idFunc.Invoke document sqlProps
@@ -705,20 +1173,20 @@ module Patch =
/// The table in which a document should be patched (may include schema)
/// The ID of the document to patch
/// The partial document to patch the existing document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byId tableName (docId: 'TKey) (patch: 'TPatch) sqlProps =
Custom.nonQuery
(Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] sqlProps
///
- /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.)
+ /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.)
///
/// The table in which documents should be patched (may include schema)
/// Whether to match any or all of the field conditions
/// The field conditions to match
/// The partial document to patch the existing document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byFields tableName howMatched fields (patch: 'TPatch) sqlProps =
Custom.nonQuery
@@ -726,11 +1194,11 @@ module Patch =
(addFieldParams fields [ jsonParam "@data" patch ])
sqlProps
- /// Patch documents using a JSON containment query in the WHERE clause (@>)
+ /// Patch documents using a JSON containment query in the WHERE clause (@>)
/// The table in which documents should be patched (may include schema)
/// The document to match the containment query
/// The partial document to patch the existing document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byContains tableName (criteria: 'TContains) (patch: 'TPatch) sqlProps =
Custom.nonQuery
@@ -738,11 +1206,11 @@ module Patch =
[ jsonParam "@data" patch; jsonParam "@criteria" criteria ]
sqlProps
- /// Patch documents using a JSON Path match query in the WHERE clause (@?)
+ /// Patch documents using a JSON Path match query in the WHERE clause (@?)
/// The table in which documents should be patched (may include schema)
/// The JSON Path expression to match
/// The partial document to patch the existing document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byJsonPath tableName jsonPath (patch: 'TPatch) sqlProps =
Custom.nonQuery
@@ -758,7 +1226,7 @@ module RemoveFields =
/// The table in which a document should be modified (may include schema)
/// The ID of the document to modify
/// One or more field names to remove from the document
- /// The SqlProps to use to execute the query
+ /// The SqlProps to use to execute the query
[]
let byId tableName (docId: 'TKey) fieldNames sqlProps =
Custom.nonQuery
@@ -769,7 +1237,7 @@ module RemoveFields =
///