diff --git a/.gitignore b/.gitignore index 06998cb..47c72c2 100644 --- a/.gitignore +++ b/.gitignore @@ -400,3 +400,7 @@ FodyWeavers.xsd # Test run files src/*-tests.txt + +# Documentation builds and intermediate files +_site/ +api/ diff --git a/bitbadger-doc.png b/bitbadger-doc.png new file mode 100644 index 0000000..22b1fe2 Binary files /dev/null and b/bitbadger-doc.png differ diff --git a/doc-template/public/main.css b/doc-template/public/main.css new file mode 100644 index 0000000..cfa8c03 --- /dev/null +++ b/doc-template/public/main.css @@ -0,0 +1,4 @@ +article h2 { + border-bottom: solid 1px gray; + margin-bottom: 1rem; +} diff --git a/doc-template/public/main.js b/doc-template/public/main.js new file mode 100644 index 0000000..e60362e --- /dev/null +++ b/doc-template/public/main.js @@ -0,0 +1,10 @@ +export default { + defaultTheme: "auto", + iconLinks: [ + { + icon: "git", + href: "https://git.bitbadger.solutions/bit-badger/BitBadger.Documents", + title: "Source Repository" + } + ] +} \ No newline at end of file diff --git a/docfx.json b/docfx.json new file mode 100644 index 0000000..b4ce50f --- /dev/null +++ b/docfx.json @@ -0,0 +1,59 @@ +{ + "$schema": "https://raw.githubusercontent.com/dotnet/docfx/main/schemas/docfx.schema.json", + "metadata": [ + { + "src": [ + { + "src": "./src", + "files": [ + "Common/bin/Release/net9.0/*.dll", + "Postgres/bin/Release/net9.0/*.dll", + "Sqlite/bin/Release/net9.0/*.dll" + ] + } + ], + "dest": "api", + "properties": { + "TargetFramework": "net9.0" + } + } + ], + "build": { + "content": [ + { + "files": [ + "index.md", + "toc.yml", + "api/**/*.{md,yml}", + "docs/**/*.{md,yml}" + ], + "exclude": [ + "_site/**" + ] + } + ], + "resource": [ + { + "files": [ + "bitbadger-doc.png", + "favicon.ico" + ] + } + ], + "output": "_site", + "template": [ + "default", + "modern", + "doc-template" + ], + "globalMetadata": { + "_appName": "BitBadger.Documents", + "_appTitle": "BitBadger.Documents", + "_appLogoPath": "bitbadger-doc.png", 
+ "_appFaviconPath": "favicon.ico", + "_appFooter": "Hand-crafted documentation created with docfx by Bit Badger Solutions", + "_enableSearch": true, + "pdf": false + } + } +} diff --git a/docs/advanced/custom-serialization.md b/docs/advanced/custom-serialization.md new file mode 100644 index 0000000..b9e108c --- /dev/null +++ b/docs/advanced/custom-serialization.md @@ -0,0 +1,38 @@ +# Custom Serialization + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +JSON documents are sent to and received from both PostgreSQL and SQLite as `string`s; the translation to and from your domain objects (commonly called POCOs) is handled via .NET. By default, the serializer used by the library is based on `System.Text.Json` with [converters for common F# types][fs]. + +## Implementing a Custom Serializer + +`IDocumentSerializer` (found in the `BitBadger.Documents` namespace) specifies two methods. `Serialize` takes a `T` and returns a `string`; `Deserialize` takes a `string` and returns an instance of `T`. (These show as `'T` in F#.) While implementing those two methods is required, the custom implementation can use whatever library you desire, and contain converters for custom types. + +Once this serializer is implemented and constructed, provide it to the library: + +```csharp +// C# +var serializer = /* constructed serializer */; +Configuration.UseSerializer(serializer); +``` + +```fsharp +// F# +let serializer = (* constructed serializer *) +Configuration.useSerializer serializer +``` + +The biggest benefit to registering a serializer (apart from control) is that all JSON operations will use the same serializer. This is most important for PostgreSQL's JSON containment queries; the object you pass as the criteria will be translated properly before it is compared. 
However, "unstructured" data does not mean "inconsistently structured" data; if your application uses custom serialization, extending this to your documents ensures that the structure is internally consistent. + +## Uses for Custom Serialization + +- If you use a custom serializer (or serializer options) in your application, a custom serializer implementation can utilize these existing configuration options. +- If you prefer [`Newtonsoft.Json`][nj], you can wrap `JsonConvert` or `JsonSerializer` calls in a custom converter. F# users may consider incorporating Microsoft's [`FSharpLu.Json`][fj] converter. +- If your project uses [`NodaTime`][], your custom serializer could include its converters for `System.Text.Json` or `Newtonsoft.Json`. +- If you use DDD to define custom types, you can implement converters to translate them to/from your preferred JSON representation. + + +[fs]: https://github.com/Tarmil/FSharp.SystemTextJson "FSharp.SystemTextJson • GitHub" +[nj]: https://www.newtonsoft.com/json "Json.NET" +[fj]: https://github.com/microsoft/fsharplu/blob/main/FSharpLu.Json.md "FSharpLu.Json • GitHub" +[`NodaTime`]: https://nodatime.org/ "NodaTime" diff --git a/docs/advanced/index.md b/docs/advanced/index.md new file mode 100644 index 0000000..270feb4 --- /dev/null +++ b/docs/advanced/index.md @@ -0,0 +1,16 @@ +# Advanced Usage + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +While the functions provided by the library cover lots of use cases, there are other times when applications need something else. Below are some of those. 
+
+- [Customizing Serialization][ser]
+- [Related Documents and Custom Queries][rel]
+- [Transactions][txn]
+- [Referential Integrity with Documents][ref] (PostgreSQL only; conceptual)
+
+
+[ser]: ./custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents"
+[rel]: ./related.md "Advanced Usage: Related Documents • BitBadger.Documents"
+[txn]: ./transactions.md "Advanced Usage: Transactions • BitBadger.Documents"
+[ref]: /concepts/referential-integrity.html "Appendix: Referential Integrity with Documents • Concepts • Relational Documents"
diff --git a/docs/advanced/related.md b/docs/advanced/related.md
new file mode 100644
index 0000000..b75b50c
--- /dev/null
+++ b/docs/advanced/related.md
@@ -0,0 +1,379 @@
+# Related Documents and Custom Queries
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+_NOTE: This page is longer than the ideal documentation page. Understanding how to assemble custom queries requires understanding how data is stored, and the list of ways to retrieve information can be... a lot. The hope is that one reading will serve as education, and the lists of options will serve as reference lists that will assist you in crafting your queries._
+
+## Overview
+
+Document stores generally have fewer relationships than traditional relational databases, particularly those that arise when data is structured in [Third Normal Form][tnf]; related collections are stored in the document, and ever-increasing surrogate keys (_a la_ sequences and such) do not play well with distributed data. Unless all data is stored in a single document, though, there will still be a natural relation between documents.
+
+Thinking back to our earlier examples, we did not store the collection of rooms in each hotel's document; each room is its own document and contains the ID of the hotel as one of its properties.
+ +```csharp +// C# +public class Hotel +{ + public string Id { get; set; } = ""; + // ... more properties +} + +public class Room +{ + public string Id { get; set; } = ""; + public string HotelId { get; set; } = ""; + // ... more properties +} +``` + +```fsharp +// F# +[] +type Hotel = + { Id: string + // ... more fields + } + +[] +type Room = + { Id: string + HotelId: string + // ... more fields + } +``` + +> The `CLIMutable` attribute is required on record types that are instantiated by the CLR; this attribute generates a zero-parameter constructor. + +## Document Table SQL in Depth + +The library creates tables with a `data` column of type `JSONB` (PostgreSQL) or `TEXT` (SQLite), with a unique index on the configured ID name that serves as the primary key (for these examples, we'll assume it's the default `Id`). The indexes created by the library all apply to the `data` column. The by-ID query for a hotel would be... + +```sql +SELECT data FROM hotel WHERE data->>'Id' = @id +``` + +...with the ID passed as the `@id` parameter. + +> _Using a "building block" method/function `Query.WhereById` will create the `data->>'Id' = @id` criteria using [the configured ID name][id]._ + +Finding all the rooms for a hotel, using our indexes we created earlier, could use a field comparison query... + +```sql +SELECT data FROM room WHERE data->>'HotelId' = @field +``` + +...with `@field` being "abc123"; PostgreSQL could also use a JSON containment query... + +```sql +SELECT data FROM room WHERE data @> @criteria +``` + +...with something like `new { HotelId = "abc123" }` passed as the matching document in the `@criteria` parameter. + +So far, so good; but, if we're looking up a room, we do not want to have to make 2 queries just to also be able to display the hotel's name. The `WHERE` clause on the first query above uses the expression `data->>'Id'`; this extracts a field from a JSON column as `TEXT` in PostgreSQL (or "best guess" in SQLite, but usually text). 
Since this is the value our unique index indexes, and we are using a relational database, we can write an efficient JOIN between these two tables. + +```sql +SELECT r.data, h.data AS hotel_data + FROM room r + INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId' + WHERE r.data->>'Id' = @id +``` + +_(This syntax would work without the unique index; for PostgreSQL, it would default to using the GIN index (`Full` or `Optimized`), if it exists, but it wouldn't be quite as efficient as a zero-or-one unique index lookup. For SQLite, this would result in a full table scan. Both PostgreSQL and SQLite also support a `->` operator, which extracts the field as a JSON value instead of its text.)_ + +## Using Building Blocks + +Most of the data access methods in both libraries are built up from query fragments and reusable functions; these are exposed for use in building custom queries. + +### Queries + +For every method or function described in [Basic Usage][], the `Query` static class/module contains the building blocks needed to construct query for that operation. Both the parent and implementation namespaces have a `Query` module; in C#, you'll need to qualify the implementation module namespace. + +In `BitBadger.Documents.Query`, you'll find: +- **StatementWhere** takes a SQL statement and a `WHERE` clause and puts them together on either side of the text ` WHERE ` +- **Definition** contains methods/functions to ensure tables, their keys, and field indexes exist. +- **Insert**, **Save**, **Count**, **Find**, **Update**, and **Delete** are the prefixes of the queries for those actions; they all take a table name and return this query (with no `WHERE` clause) +- **Exists** also requires a `WHERE` clause, due to how the query is constructed + because it is inserted as a subquery + +Within each implementation's `Query` module: +- **WhereByFields** takes a `FieldMatch` case and a set of fields. 
`Field` has constructor functions for each comparison it supports; these functions generally take a field name and a value, though the latter two do not require a value. + - **Equal** uses `=` to create an equality comparison + - **Greater** uses `>` to create a greater-than comparison + - **GreaterOrEqual** uses `>=` to create a greater-than-or-equal-to comparison + - **Less** uses `<` to create a less-than comparison + - **LessOrEqual** uses `<=` to create a less-than-or-equal-to comparison + - **NotEqual** uses `<>` to create a not-equal comparison + - **Between** uses `BETWEEN` to create a range comparison + - **In** uses `IN` to create an equality comparison within a set of given values + - **InArray** uses `?|` in PostgreSQL, and a combination of `EXISTS` / `json_each` / `IN` in SQLite, to create an equality comparison within a given set of values against an array in a JSON document + - **Exists** uses `IS NOT NULL` to create an existence comparison + - **NotExists** uses `IS NULL` to create a non-existence comparison; fields are considered null if they are either not part of the document, or if they are part of the document but explicitly set to `null` +- **WhereById** takes a parameter name and generates a field `Equal` comparison against the configured ID field. +- **Patch** and **RemoveFields** use each implementation's unique syntax for partial updates and field removals. +- **ByFields**, **ByContains** (PostgreSQL), and **ByJsonPath** (PostgreSQL) are functions that take a statement and the criteria, and construct a query to fit that criteria. For `ByFields`, each field parameter will use its specified name if provided (an incrementing `field[n]` if not). `ByContains` uses `@criteria` as its parameter name, which can be any object. `ByJsonPath` uses `@path`, which should be a `string`. + +That's a lot of reading! Some examples a bit below will help this make sense. 
+ +### Parameters + +Traditional ADO.NET data access involves creating a connection object, then adding parameters to that object. This library follows a more declarative style, where parameters are passed via `IEnumerable` collections. To assist with creating these collections, each implementation has some helper functions. For C#, these calls will need to be prefixed with `Parameters`; for F#, this module is auto-opened. This is one area where names differ in other than just casing, so both will be listed. + +- **Parameters.Id** / **idParam** generate an `@id` parameter with the numeric, `string`, or `ToString()`ed value of the ID passed. +- **Parameters.Json** / **jsonParam** generate a user-provided-named JSON-formatted parameter for the value passed (this can be used for PostgreSQL's JSON containment queries as well) +- **Parameters.AddFields** / **addFieldParams** append field parameters to the given parameter list +- **Parameters.FieldNames** / **fieldNameParams** create parameters for the list of field names to be removed; for PostgreSQL, this returns a single parameter, while SQLite returns a list of parameters +- **Parameters.None** / **noParams** is an empty set of parameters, and can be cleaner and convey intent better than something like `new[] { }` _(For C# 12 or later, the collection expression `[]` is much terser.)_ + +If you need a parameter beyond these, both `NpgsqlParameter` and `SqliteParameter` have a name-and-value constructor; that isn't many more keystrokes. + +### Results + +The `Results` module is implementation specific. Both libraries provide `Results.FromData`, which deserializes a `data` column into the requested type; and `FromDocument`, which does the same thing, but allows the column to be named as well. We'll see how we can use these in further examples. As with parameters, C# users need to qualify the class name, but the module is auto-opened for F#. 
+ +## Putting It All Together + +The **Custom** static class/module has seven methods/functions: + +- **List** requires a query, parameters, and a mapping function, and returns a list of documents. +- **JsonArray** is the same as `List`, but returns the documents as `string` in a JSON array. +- **WriteJsonArray** writes documents to a `PipeWriter` as they are read from the database; the result is the same a `JsonArray`, but no unified strings is constructed. +- **Single** requires a query, parameters, and a mapping function, and returns one or no documents (C# `TDoc?`, F# `'TDoc option`) +- **JsonSingle** is the same as `Single`, but returns a JSON `string` instead (returning `{}` if no document is found). +- **Scalar** requires a query, parameters, and a mapping function, and returns a scalar value (non-nullable; used for counts, existence, etc.) +- **NonQuery** requires a query and parameters and has no return value + +> _Within each library, every other call is written in terms of these functions; your custom queries will use the same code the provided ones do!_ + +Let's jump in with an example. When we query for a room, let's say that we also want to retrieve its hotel information as well. We saw the query above, but here is how we can implement it using a custom query. + +```csharp +// C#, All +// return type is Tuple? +var data = await Custom.Single( + $"SELECT r.data, h.data AS hotel_data + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId' + WHERE r.{Query.WhereById("@id")}", + new[] { Parameters.Id("my-room-key") }, + // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite + rdr => Tuple.Create(Results.FromData(rdr), Results.FromDocument("hotel_data", rdr)); +if (data is not null) +{ + var (room, hotel) = data; + // do stuff with the room and hotel data +} +``` + +```fsharp +// F#, All +// return type is (Room * Hotel) option +let! 
data = + Custom.single + $"""SELECT r.data, h.data AS hotel_data + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId' + WHERE r.{Query.whereById "@id"}""" + [ idParam "my-room-key" ] + // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite + fun rdr -> (fromData rdr), (fromDocument "hotel_data" rdr) +match data with +| Some (Room room, Hotel hotel) -> + // do stuff with room and hotel +| None -> () +``` + +These queries use `Configuration.IdField` and `WhereById` to use the configured ID field. Creating custom queries using these building blocks allows us to utilize the configured value without hard-coding it throughout our custom queries. If the configuration changes, these queries will pick up the new field name seamlessly. + +While this example retrieves the entire document, this is not required. If we only care about the name of the associated hotel, we could amend the query to retrieve only that information. + +```csharp +// C#, All +// return type is Tuple? +var data = await Custom.Single( + $"SELECT r.data, h.data ->> 'Name' AS hotel_name + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId' + WHERE r.{Query.WhereById("@id")}", + new[] { Parameters.Id("my-room-key") }, + // PostgreSQL + row => Tuple.Create(Results.FromData(row), row.string("hotel_name"))); + // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well + // rdr => Tuple.Create(Results.FromData(rdr), rdr.GetString(1))); + +if (data is not null) +{ + var (room, hotelName) = data; + // do stuff with the room and hotel name +} +``` + +```fsharp +// F#, All +// return type is (Room * string) option +let! 
data = + Custom.single + $"""SELECT r.data, h.data->>'Name' AS hotel_name + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId' + WHERE r.{Query.whereById "@id"}""" + [ idParam "my-room-key" ] + // PostgreSQL + fun row -> (fromData row), row.string "hotel_name" + // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well + // fun rdr -> (fromData rdr), rdr.GetString(1) +match data with +| Some (Room room, string hotelName) -> + // do stuff with room and hotel name +| None -> () +``` + +These queries are amazingly efficient, using 2 unique index lookups to return this data. Even though we do not have a foreign key between these two tables, simply being in a relational database allows us to retrieve this related data. + +Revisiting our "take these rooms out of service" SQLite query from the Basic Usage page, here's how that could look using building blocks available since version 4 (PostgreSQL will accept this query syntax as well, though the parameter types would be different): + +```csharp +// C#, SQLite +var fields = [Field.GreaterOrEqual("RoomNumber", 221), Field.LessOrEqual("RoomNumber", 240)]; +await Custom.NonQuery( + Sqlite.Query.ByFields(Sqlite.Query.Patch("room"), FieldMatch.All, fields, + new { InService = false }), + Parameters.AddFields(fields, [])); +``` + +```fsharp +// F#, SQLite +let fields = [ Field.GreaterOrEqual "RoomNumber" 221; Field.LessOrEqual "RoomNumber" 240 ] +do! Custom.nonQuery + (Query.byFields (Query.patch "room") All fields {| InService = false |}) + (addFieldParams fields [])) +``` + +This uses two field comparisons to incorporate the room number range instead of a `BETWEEN` clause; we would definitely want to have that field indexed if this was going to be a regular query or our data was going to grow beyond a trivial size. + +_You may be thinking "wait - what's the difference between that an the regular `Patch` call?" 
And you'd be right; that is exactly what `Patch.ByFields` does. `Between` is also a better comparison for this, and either `FieldMatch` type will work, as we're only passing one field. No building blocks required!_ + +```csharp +// C#, All +await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)], + new { InService = false }); +``` + +```fsharp +// F#, All +do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |} +``` + +## Going Even Further + +### Updating Data in Place + +One drawback to document databases is the inability to update values in place; however, with a bit of creativity, we can do a lot more than we initially think. For a single field, SQLite has a `json_set` function that takes an existing JSON field, a field name, and a value to which it should be set. This allows us to do single-field updates in the database. If we wanted to raise our rates 10% for every room, we could use this query: + +```sql +-- SQLite +UPDATE room SET data = json_set(data, 'Rate', data->>'Rate' * 1.1) +``` + +If we get any more complex, though, Common Table Expressions (CTEs) can help us. Perhaps we decided that we only wanted to raise the rates for hotels in New York, Chicago, and Los Angeles, and we wanted to exclude any brand with the word "Value" in its name. A CTE lets us select the source data we need to craft the update, then use that in the `UPDATE`'s clauses. 
+
+```sql
+-- SQLite
+WITH to_update AS
+    (SELECT r.data->>'Id' AS room_id, r.data->>'Rate' AS current_rate, r.data AS room_data
+       FROM room r
+            INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+      WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
+        AND LOWER(h.data->>'Name') NOT LIKE '%value%')
+UPDATE room
+   SET data = json_set(to_update.room_data, 'Rate', to_update.current_rate * 1.1)
+  FROM to_update
+ WHERE data->>'Id' = to_update.room_id
+```
+
+Both PostgreSQL and SQLite provide JSON patching, where multiple fields (or entire structures) can be changed at once. Let's revisit our rate increase; if we are making the rate more than $500, we'll apply a status of "Premium" to the room. If it is less than that, it should keep its same value.
+
+First up, PostgreSQL:
+```sql
+-- PostgreSQL
+WITH to_update AS
+    (SELECT r.data->>'Id' AS room_id, (r.data->>'Rate')::decimal AS rate, r.data->>'Status' AS status
+       FROM room r
+            INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+      WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
+        AND LOWER(h.data ->> 'Name') NOT LIKE '%value%')
+UPDATE room
+   SET data = data ||
+         ('{"Rate":' || to_update.rate * 1.1 || ',"Status":"'
+          || CASE WHEN to_update.rate * 1.1 > 500 THEN 'Premium' ELSE to_update.status END
+          || '"}')::jsonb
+  FROM to_update
+ WHERE data->>'Id' = to_update.room_id
+```
+
+In SQLite:
+```sql
+-- SQLite
+WITH to_update AS
+    (SELECT r.data->>'Id' AS room_id, r.data->>'Rate' AS rate, r.data->>'Status' AS status
+       FROM room r
+            INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
+      WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
+        AND LOWER(h.data->>'Name') NOT LIKE '%value%')
+UPDATE room
+   SET data = json_patch(data, json(
+         '{"Rate":' || to_update.rate * 1.1 || ',"Status":"'
+          || CASE WHEN to_update.rate * 1.1 > 500 THEN 'Premium' ELSE to_update.status END
+          || '"}'))
+  FROM to_update
+ WHERE data->>'Id' = to_update.room_id
+```
+
+For PostgreSQL, `->>` always returns text, so we need to cast the rate to a
number. In either case, we do not want to use this technique for user-provided data; however, in place, it allowed us to complete all of our scenarios without having to load the documents into our application and manipulate them there. + +Updates in place may not need parameters (though it would be easy to foresee a "rate adjustment" feature where the 1.1 adjustment was not hard-coded); in fact, none of the samples in this section used the document libraries at all. These queries can be executed by `Custom.NonQuery`, though, providing parameters as required. + +### Using This Library for Non-Document Queries + +The `Custom` methods/functions can be used with non-document tables as well. This may be a convenient and consistent way to access your data, while delegating connection management to the library and its configured data source. + +Let's walk through a short example using C# and PostgreSQL: + +```csharp +// C#, PostgreSQL +using Npgsql.FSharp; // Needed for RowReader and Sql types +using static CommonExtensionsAndTypesForNpgsqlFSharp; // Needed for Sql functions + +// Stores metadata for a given user +public class MetaData +{ + public string Id { get; set; } = ""; + public string UserId { get; set; } = ""; + public string Key { get; set; } = ""; + public string Value { get; set; } = ""; +} + +// Static class to hold mapping functions +public static class Map +{ + // These parameters are the column names from the underlying table + public MetaData ToMetaData(RowReader row) => + new MetaData + { + Id = row.string("id"), + UserId = row.string("user_id"), + Key = row.string("key"), + Value = row.string("value") + }; +} + +// somewhere in a class, retrieving data +public Task> MetaDataForUser(string userId) => + Document.Custom.List("SELECT * FROM user_metadata WHERE user_id = @userId", + new { Tuple.Create("@userId", Sql.string(userId)) }, + Map.ToMetaData); +``` + +For F#, the `using static` above is not needed; that module is auto-opened when `Npgsql.FSharp` is 
opened. For SQLite in either language, the mapping function uses a `SqliteDataReader` object, which implements the standard ADO.NET `DataReader` functions of `Get[Type](idx)` (and `GetOrdinal(name)` for the column index). + + +[tnf]: https://en.wikipedia.org/wiki/Third_normal_form "Third Normal Form • Wikipedia" +[id]: ../getting-started.md#field-name "Getting Started (ID Fields) • BitBadger.Documents" +[Basic Usage]: ../basic-usage.md "Basic Usage • BitBadger.Documents" diff --git a/docs/advanced/transactions.md b/docs/advanced/transactions.md new file mode 100644 index 0000000..703b075 --- /dev/null +++ b/docs/advanced/transactions.md @@ -0,0 +1,96 @@ +# Transactions + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +On occasion, there may be a need to perform multiple updates in a single database transaction, where either all updates succeed, or none do. + +## Controlling Database Transactions + +The `Configuration` static class/module of each library [provides a way to obtain a connection][conn]. Whatever strategy your application uses to obtain the connection, the connection object is how ADO.NET implements transactions. + +```csharp +// C#, All +// "conn" is assumed to be either NpgsqlConnection or SqliteConnection +await using var txn = await conn.BeginTransactionAsync(); +try +{ + // do stuff + await txn.CommitAsync(); +} +catch (Exception ex) +{ + await txn.RollbackAsync(); + // more error handling +} +``` + +```fsharp +// F#, All +// "conn" is assumed to be either NpgsqlConnection or SqliteConnection +use! txn = conn.BeginTransactionAsync () +try + // do stuff + do! txn.CommitAsync () +with ex -> + do! 
txn.RollbackAsync ()
+    // more error handling
+```
+
+## Executing Queries on the Connection
+
+This precise scenario was the reason that all methods and functions are implemented on the connection object; all extensions execute the commands in the context of the connection. Imagine an application where a user signs in. We may want to set an attribute on the user record that says that now is the last time they signed in; and we may also want to reset a failed logon counter, as they have successfully signed in. This would look like:
+
+```csharp
+// C#, All ("conn" is our connection object)
+await using var txn = await conn.BeginTransactionAsync();
+try
+{
+    await conn.PatchById("user_table", userId, new { LastSeen = DateTime.Now });
+    await conn.PatchById("security", userId, new { FailedLogOnCount = 0 });
+    await txn.CommitAsync();
+}
+catch (Exception ex)
+{
+    await txn.RollbackAsync();
+    // more error handling
+}
+```
+
+```fsharp
+// F#, All ("conn" is our connection object)
+use! txn = conn.BeginTransactionAsync()
+try
+    do! conn.patchById "user_table" userId {| LastSeen = DateTime.Now |}
+    do! conn.patchById "security" userId {| FailedLogOnCount = 0 |}
+    do! txn.CommitAsync()
+with ex ->
+    do! txn.RollbackAsync()
+    // more error handling
+```
+
+### A Functional Alternative
+
+The PostgreSQL library has a static class/module called `WithProps`; the SQLite library has a static class/module called `WithConn`. Each of these accept the `SqlProps` or `SqliteConnection` parameter as the last parameter of the query. For SQLite, we need nothing else to pass the connection to these methods/functions; for PostgreSQL, though, we'll need to create a `SqlProps` object based off the connection.
+
+```csharp
+// C#, PostgreSQL
+using Npgsql.FSharp;
+// ...
+var props = Sql.existingConnection(conn);
+// ...
+await WithProps.Patch.ById("user_table", userId, new { LastSeen = DateTime.Now }, props);
+```
+
+```fsharp
+// F#, PostgreSQL
+open Npgsql.FSharp
+// ...
+let props = Sql.existingConnection conn +// ... +do! WithProps.Patch.ById "user_table" userId {| LastSeen = DateTime.Now |} props +``` + +If we do not want to qualify with `WithProps` or `WithConn`, C# users can add `using static [WithProps|WithConn];` to bring these functions into scope; F# users can add `open BitBadger.Documents.[Postgres|Sqlite].[WithProps|WithConn]` to bring them into scope. However, in C#, this will affect the entire file, and in F#, it will affect the file from that point through the end of the file. Unless you want to go all-in with the connection-last functions, it is probably better to qualify the occasional call. + + +[conn]: ../getting-started.md#the-connection "Getting Started (The Connection) • BitBadger.Documents" diff --git a/docs/basic-usage.md b/docs/basic-usage.md new file mode 100644 index 0000000..6cb6caa --- /dev/null +++ b/docs/basic-usage.md @@ -0,0 +1,149 @@ +# Basic Usage + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +## Overview + +There are several categories of operations that can be accomplished against documents. 
+ +- **Count** returns the number of documents matching some criteria +- **Exists** returns true if any documents match the given criteria +- **Insert** adds a new document, failing if the ID field is not unique +- **Save** adds a new document, updating an existing one if the ID is already present ("upsert") +- **Update** updates an existing document, doing nothing if no documents satisfy the criteria +- **Patch** updates a portion of an existing document, doing nothing if no documents satisfy the criteria +- **Find** returns the documents matching some criteria as domain objects +- **Json** returns or writes documents matching some criteria as JSON text +- **RemoveFields** removes fields from documents matching some criteria +- **Delete** removes documents matching some criteria + +`Insert` and `Save` were the only two that don't mention criteria. For the others, "some criteria" can be defined a few different ways: +- **All** references all documents in the table; applies to Count and Find +- **ById** looks for a single document on which to operate; applies to all but Count +- **ByFields** uses JSON field comparisons to select documents for further processing (PostgreSQL will use a numeric comparison if the field value is numeric, or a string comparison otherwise; SQLite will do its usual [best-guess on types][]{target=_blank rel=noopener}); applies to all but Update +- **ByContains** (PostgreSQL only) uses a JSON containment query (the `@>` operator) to find documents where the given sub-document occurs (think of this as an `=` comparison based on one or more properties in the document; looking for hotels with `{ "Country": "USA", "Rating": 4 }` would find all hotels with a rating of 4 in the United States); applies to all but Update +- **ByJsonPath** (PostgreSQL only) uses a JSON patch match query (the `@?` operator) to make specific queries against a document's structure (it also supports more operators than a containment query; to find all hotels rated 4 _or 
higher_ in the United States, we could query for `"$ ? (@.Country == \"USA\" && @.Rating >= 4)"`); applies to all but Update + +Finally, `Find` and `Json` also have `FirstBy*` implementations for all supported criteria types, and `Find*Ordered` implementations to sort the results in the database. + +## Saving Documents + +The library provides three different ways to save data. The first equates to a SQL `INSERT` statement, and adds a single document to the repository. + +```csharp +// C#, All +var room = new Room(/* ... */); +// Parameters are table name and document +await Document.Insert("room", room); +``` + +```fsharp +// F#, All +let room = { Room.empty with (* ... *) } +do! insert "room" room +``` + +The second is `Save`; it inserts the data if it does not exist and replaces the document if it does exist (what some call an "upsert"). It utilizes the `ON CONFLICT` syntax to ensure an atomic statement. Its parameters are the same as those for `Insert`. + +The third equates to a SQL `UPDATE` statement. `Update` applies to a full document and is usually used by ID, while `Patch` is used for partial updates and may be done by field comparison, JSON containment, or JSON Path match. For a few examples, let's begin with a query that may back the "edit hotel" page. This page lets the user update nearly all the details for the hotel, so updating the entire document would be appropriate. + +```csharp +// C#, All +var hotel = await Document.Find.ById("hotel", hotelId); +if (!(hotel is null)) +{ + // update hotel properties from the posted form + await Update.ById("hotel", hotel.Id, hotel); +} +``` + +```fsharp +// F#, All +match! Find.byId "hotel" hotelId with +| Some hotel -> + let updated = { hotel with (* properties from posted form *) } + do! Update.byId "hotel" hotel.Id updated +| None -> () +``` + +For the next example, suppose we are upgrading our hotel, and need to take rooms 221-240 out of service*. We can utilize a patch via JSON Path** to accomplish this. 
+ +```csharp +// C#, PostgreSQL +await Patch.ByJsonPath("room", + "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))", + new { InService = false }); +``` + +```fsharp +// F#, PostgreSQL +do! Patch.byJsonPath "room" + "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))" + {| InService = false |} +``` + +_* - we are ignoring the current reservations, end date, etc. This is a very naïve example!_ + +\** - Both PostgreSQL and SQLite can also accomplish this using the `Between` comparison and a `ByFields` query: + +```csharp +// C#, Both +await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)], + new { InService = false }); +``` + +```fsharp +// F#, Both +do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |} +``` + +This could also be done with `All`/`FieldMatch.All` and `GreaterOrEqual` and `LessOrEqual` field comparisons, or even a custom query; these are fully explained in the [Advanced Usage][] section. + +> There is an `Update.ByFunc` variant that takes an ID extraction function run against the document instead of its ID. This is detailed in the [Advanced Usage][] section. + +## Finding Documents as Domain Items + +Functions to find documents start with `Find.`. There are variants to find all documents in a table, find by ID, find by JSON field comparisons, find by JSON containment, or find by JSON Path. The hotel update example above utilizes an ID lookup; the descriptions of JSON containment and JSON Path show examples of the criteria used to retrieve using those techniques. + +`Find` methods and functions are generic; specifying the return type is crucial. Additionally, `ById` will need the type of the key being passed. In C#, `ById` and the `FirstBy*` methods will return `TDoc?`, with the value if it was found or `null` if it was not; `All` and other `By*` methods return `List` (from `System.Collections.Generic`). 
In F#, `byId` and the `firstBy*` functions will return `'TDoc option`; `all` and other `by*` functions return `'TDoc list`. + +`Find*Ordered` methods and functions append an `ORDER BY` clause to the query that will sort the results in the database. These take, as their last parameter, a sequence of `Field` items; a `.Named` method allows for field creation for these names. Within these names, prefixing the name with `n:` will tell PostgreSQL to sort this field numerically rather than alphabetically; it has no effect in SQLite (it does its own [type coercion][best-guess on types]). Adding " DESC" at the end will sort high-to-low instead of low-to-high. + +## Finding Documents as JSON + +All `Find` methods and functions have two corresponding `Json` functions. + +* The first set return the expected document(s) as a `string`, and will always return valid JSON. Single-document queries with nothing found will return `{}`, while zero-to-many queries will return `[]` if no documents match the given criteria. +* The second set are prefixed with `Write`, and take a `PipeWriter` immediately after the table name parameter. These functions write results to the given pipeline as they are retrieved from the database, instead of accumulating them all and returning a `string`. This can be useful for JSON API scenarios; ASP.NET Core's `HttpResponse.BodyWriter` property is a `PipeWriter` (and pipelines are [preferred over streams][pipes]). + +## Deleting Documents + +Functions to delete documents start with `Delete.`. Document deletion is supported by ID, JSON field comparison, JSON containment, or JSON Path match. The pattern is the same as for finding or partially updating. _(There is no library method provided to delete all documents, though deleting by JSON field comparison where a non-existent field is null would accomplish this.)_ + +## Counting Documents + +Functions to count documents start with `Count.`. 
Documents may be counted by a table in its entirety, by JSON field comparison, by JSON containment, or by JSON Path match. _(Counting by ID is an existence check!)_ + +## Document Existence + +Functions to check for existence start with `Exists.`. Documents may be checked for existence by ID, JSON field comparison, JSON containment, or JSON Path match. + +## What / How Cross-Reference + +The table below shows which commands are available for each access method. (X = supported for both, P = PostgreSQL only) + +| Operation | `All` | `ById` | `ByFields` | `ByContains` | `ByJsonPath` | `FirstByFields` | `FirstByContains` | `FirstByJsonPath` | +|-----------------|:-----:|:------:|:----------:|:------------:|:------------:|:---------------:|:-----------------:|:-----------------:| +| `Count` | X | | X | P | P | | | | +| `Exists` | | X | X | P | P | | | | +| `Find` / `Json` | X | X | X | P | P | X | P | P | +| `Patch` | | X | X | P | P | | | | +| `RemoveFields` | | X | X | P | P | | | | +| `Delete` | | X | X | P | P | | | | + +`Insert`, `Save`, and `Update.*` operate on single documents. + +[best-guess on types]: https://sqlite.org/datatype3.html "Datatypes in SQLite • SQLite" +[JSON Path]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-SQLJSON-PATH "JSON Functions and Operators • PostgreSQL Documentation" +[Advanced Usage]: ./advanced/index.md "Advanced Usage • BitBadger.Documents • Bit Badger Solutions" +[pipes]: https://learn.microsoft.com/en-us/aspnet/core/fundamentals/middleware/request-response?view=aspnetcore-9.0 "Request and Response Operations • Microsoft Learn" diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 0000000..3767ac0 --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,187 @@ +# Getting Started +## Overview + +Each library has three different ways to execute commands: +- Functions/methods that have no connection parameter at all; for these, each call obtains a new connection. 
_(Connection pooling greatly reduced this overhead and churn on the database)_ +- Functions/methods that take a connection as the last parameter; these use the given connection to execute the commands. +- Extensions on the `NpgsqlConnection` or `SqliteConnection` type (native for both C# and F#); these are the same as the prior ones, and the names follow a similar pattern (ex. `Count.All()` is exposed as `conn.CountAll()`). + +This provides flexibility in how connections are managed. If your application does not care about it, configuring the library is all that is required. If your application generally does not care, but needs a connection on occasion, one can be obtained from the library and used as required. If you are developing a web application, and want to use one connection per request, you can register the library's connection functions as a factory, and have that connection injected. We will cover the how-to below for each scenario, but it is worth considering before getting started. + +> A note on functions: the F# functions use `camelCase`, while C# calls use `PascalCase`. To cut down on the noise, this documentation will generally use the C# `Count.All` form; know that this is `Count.all` for F#, `conn.CountAll()` for the C# extension method, and `conn.countAll` for the F# extension. + +## Namespaces + +### C# + +```csharp +using BitBadger.Documents; +using BitBadger.Documents.[Postgres|Sqlite]; +``` + +### F# + +```fsharp +open BitBadger.Documents +open BitBadger.Documents.[Postgres|Sqlite] +``` + +For F#, this order is significant; both namespaces have modules that share names, and this order will control which one shadows the other. + +## Configuring the Connection + +### The Connection String + +Both PostgreSQL and SQLite use the standard ADO.NET connection string format ([`Npgsql` docs][], [`Microsoft.Data.Sqlite` docs][]). The usual location for these is an `appsettings.json` file, which is then parsed into an `IConfiguration` instance. 
For SQLite, all the library needs is a connection string: + +```csharp +// C#, SQLite +// ... +var config = ...; // parsed IConfiguration +Sqlite.Configuration.UseConnectionString(config.GetConnectionString("SQLite")); +// ... +``` + +```fsharp +// F#, SQLite +// ... +let config = ...; // parsed IConfiguration +Configuration.useConnectionString (config.GetConnectionString("SQLite")) +// ... +``` + +For PostgreSQL, the library needs an `NpgsqlDataSource` instead. There is a builder that takes a connection string and creates it, so it still is not a lot of code: _(although this implements `IDisposable`, do not declare it with `using` or `use`; the library handles disposal if required)_ + +```csharp +// C#, PostgreSQL +// ... +var config = ...; // parsed IConfiguration +var dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build(); +Postgres.Configuration.UseDataSource(dataSource); +// ... +``` + +```fsharp +// F#, PostgreSQL +// ... +let config = ...; // parsed IConfiguration +let dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build() +Configuration.useDataSource dataSource +// ... +``` + +### The Connection + +- If the application does not care to control the connection, use the methods/functions that do not require one. +- To retrieve an occasional connection (possibly to do multiple updates in a transaction), the `Configuration` static class/module for each implementation has a way. (For both of these, define the result with `using` or `use` so that they are disposed properly.) + - For PostgreSQL, the `DataSource()` method returns the configured `NpgsqlDataSource` instance; from this, `OpenConnection[Async]()` can be used to obtain a connection. + - For SQLite, the `DbConn()` method returns a new, open `SqliteConnection`. +- To use a connection per request in a web application scenario, register it with DI. 
+ +```csharp +// C#, PostgreSQL +builder.Services.AddScoped(svcProvider => + Postgres.Configuration.DataSource().OpenConnection()); +// C#, SQLite +builder.Services.AddScoped(svcProvider => Sqlite.Configuration.DbConn()); +``` + +```fsharp +// F#, PostgreSQL +let _ = builder.Services.AddScoped(fun sp -> Configuration.dataSource().OpenConnection()) +// F#, SQLite +let _ = builder.Services.AddScoped(fun sp -> Configuration.dbConn ()) +``` + +After registering, this connection will be available on the request context and can be injected in the constructor for things like Razor Pages or MVC Controllers. + +## Configuring Document IDs + +### Field Name + +A common .NET pattern when naming unique identifiers for entities / documents / etc. is the name `Id`. By default, this library assumes that this field is the identifier for your documents. If your code follows this pattern, you will be happy with the default behavior. If you use a different property, or [implement a custom serializer][ser] to modify the JSON representation of your documents' IDs, though, you will need to configure that field name before you begin calling other functions or methods. A great spot for this is just after you configure the connection string or data source (above). If you have decided that the field "Name" is the unique identifier for your documents, your setup would look something like... + +```csharp +// C#, All +Configuration.UseIdField("Name"); +``` + +```fsharp +// F#, All +Configuration.useIdField "Name" +``` + +Setting this will make `EnsureTable` create the unique index on that field when it creates a table, and will make all the `ById` functions and methods look for `data->>'Name'` instead of `data->>'Id'`. JSON is case-sensitive, so if the JSON is camel-cased, this should be configured to be `id` instead of `Id` (or `name` to follow the example above). + +### Generation Strategy + +The library can also generate IDs if they are missing. 
There are three different types of IDs, and each case of the `AutoId` enumeration/discriminated union can be passed to `Configuration.UseAutoIdStrategy()` to configure the library. + +- `Number` generates a "max ID plus 1" query based on the current values of the table. +- `Guid` generates a 32-character string from a Globally Unique Identifier (GUID), lowercase with no dashes. +- `RandomString` generates random bytes and converts them to a lowercase hexadecimal string. By default, the string is 16 characters, but can be changed via `Configuration.UseIdStringLength()`. _(You can also use `AutoId.GenerateRandomString(length)` to generate these strings for other purposes; they make good salts, transient keys, etc.)_ + +All of these are off by default (the `Disabled` case). Even when ID generation is configured, though, only IDs of 0 (for `Number`) or empty strings (for `Guid` and `RandomString`) will be generated. IDs are only generated on `Insert`. + +> Numeric IDs are a one-time decision. In PostgreSQL, once a document has a non-numeric ID, attempts to insert an automatic number will fail. One could switch from numbers to strings, and the IDs would be treated as such (`"33"` instead of `33`, for example). SQLite does a best-guess typing of columns, but once a string ID is there, the "max + 1" algorithm will not return the expected results. + +## Ensuring Tables and Indexes Exist + +Both PostgreSQL and SQLite store data in tables and can utilize indexes to retrieve that data efficiently. Each application will need to determine the tables and indexes it expects. + +To discover these concepts, let's consider a naive example of a hotel chain; they have several hotels, and each hotel has several rooms. While each hotel could have its rooms as part of a `Hotel` document, there would likely be a lot of contention with concurrent updates for rooms, so we will put rooms in their own table. 
The hotel will store attributes like name, address, etc.; while each room will have the hotel's ID (named `HotelId`), along with things like room number, floor, and a list of date ranges where the room is not available. (This could be for customer reservation, maintenance, etc.) + +_(Note that all "ensure" methods/functions below use the `IF NOT EXISTS` clause; they are safe to run each time the application starts up, and will do nothing if the tables or indexes already exist.)_ + +### PostgreSQL + +We have a few options when it comes to indexing our documents. We can index a specific JSON field; each table's primary key is implemented as a unique index on the configured ID field. We can also use a GIN index to index the entire document, and that index can even be [optimized for a subset of JSON Path operators][json-index]. + +Let's create a general-purpose index on hotels, a "HotelId" index on rooms, and an optimized document index on rooms. + +```csharp +// C#, PostgreSQL +await Definition.EnsureTable("hotel"); +await Definition.EnsureDocumentIndex("hotel", DocumentIndex.Full); +await Definition.EnsureTable("room"); +// parameters are table name, index name, and fields to be indexed +await Definition.EnsureFieldIndex("room", "hotel_id", ["HotelId"]); +await Definition.EnsureDocumentIndex("room", DocumentIndex.Optimized); +``` + +```fsharp +// F#, PostgreSQL +do! Definition.ensureTable "hotel" +do! Definition.ensureDocumentIndex "hotel" Full +do! Definition.ensureTable "room" +do! Definition.ensureFieldIndex "room" "hotel_id" [ "HotelId" ] +do! Definition.ensureDocumentIndex "room" Optimized +``` + +### SQLite + +For SQLite, the only option for JSON indexes (outside some quite complex techniques) are indexes on fields. Just as traditional relational indexes, these fields can be specified in expected query order. In our example, if we indexed our rooms on hotel ID and room number, it could also be used for efficient retrieval just by hotel ID. 
+ +Let's create hotel and room tables, then index rooms by hotel ID and room number. + +```csharp +// C#, SQLite +await Definition.EnsureTable("hotel"); +await Definition.EnsureTable("room"); +await Definition.EnsureIndex("room", "hotel_and_nbr", ["HotelId", "RoomNumber"]); +``` + +```fsharp +// F#, SQLite +do! Definition.ensureTable "hotel" +do! Definition.ensureTable "room" +do! Definition.ensureIndex "room" "hotel_and_nbr" [ "HotelId"; "RoomNumber" ] +``` + +Now that we have tables, let's [use them][]! + +[`Npgsql` docs]: https://www.npgsql.org/doc/connection-string-parameters "Connection String Parameter • Npgsql" +[`Microsoft.Data.Sqlite` docs]: https://learn.microsoft.com/en-us/dotnet/standard/data/sqlite/connection-strings "Connection Strings • Microsoft.Data.Sqlite • Microsoft Learn" +[ser]: ./advanced/custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents" +[json-index]: https://www.postgresql.org/docs/current/datatype-json.html#JSON-INDEXING "Indexing JSON Fields • PostgreSQL" +[use them]: ./basic-usage.md "Basic Usage • BitBadger.Documents" diff --git a/docs/toc.yml b/docs/toc.yml new file mode 100644 index 0000000..941f98c --- /dev/null +++ b/docs/toc.yml @@ -0,0 +1,21 @@ +- name: Getting Started + href: getting-started.md +- name: Basic Usage + href: basic-usage.md +- name: Advanced Usage + href: advanced/index.md + items: + - name: Custom Serialization + href: advanced/custom-serialization.md + - name: Related Documents and Custom Queries + href: advanced/related.md + - name: Transactions + href: advanced/transactions.md +- name: Upgrading + items: + - name: v3 to v4 + href: upgrade/v4.md + - name: v2 to v3 + href: upgrade/v3.md + - name: v1 to v2 + href: upgrade/v2.md diff --git a/docs/upgrade/v2.md b/docs/upgrade/v2.md new file mode 100644 index 0000000..fcf8acf --- /dev/null +++ b/docs/upgrade/v2.md @@ -0,0 +1,37 @@ +# Migrating from v1 to v2 + +_NOTE: This was an upgrade for the `BitBadger.Npgsql.Documents` library, which 
this library replaced as of v3._ + +## Why + +In version 1 of this library, the document tables used by this library had two columns: `id` and `data`. `id` served as the primary key, and `data` was the `JSONB` column for the document. Since its release, the author learned that a field in a `JSONB` column could have a unique index that would then serve the role of a primary key. + +Version 2 of this library implements this change, both in table setup and in how it constructs queries that occur by a document's ID. + +## How + +On the [GitHub release page][], there is a MigrateToV2 utility program - one for Windows, and one for Linux. Download and extract the single file in the archive; it requires no installation. It uses an environment variable for the connection string, and takes a table name and an ID column field via the command line. + +A quick example under Linux/bash (assuming the ID field in the JSON document is named `Id`)... +``` +export PGDOC_CONN_STR="Host=localhost;Port=5432;User ID=example_user;Password=example_pw;Database=my_docs" +./MigrateToV2 ex.doc_table +./MigrateToV2 ex.another_one +``` + +If the ID field has a different name, it can be passed as a second parameter. The utility will display the table name and ID field and ask for confirmation; if you are scripting it, you can set the environment variable `PGDOC_I_KNOW_WHAT_I_AM_DOING` to `true`, and it will bypass this confirmation. Note that the utility itself is quite basic; you are responsible for giving it sane input. If you have customized the tables or the JSON serializer, though, keep reading. + +## What + +If you have extended the original tables, you may need to handle this migration within either PostgreSQL/psql or your code. The process entails two steps. First, create a unique index on the ID field; in this example, we'll use `name` for the example ID field. Then, drop the `id` column. The below SQL will accomplish this for the fictional `my_table` table. 
+ +```sql +CREATE UNIQUE INDEX idx_my_table_key ON my_table ((data ->> 'name')); +ALTER TABLE my_table DROP COLUMN id; +``` + +If the ID field is different, you will also need to tell the library that. Use `Configuration.UseIdField("name")` (C#) / `Configuration.useIdField "name"` (F#) to specify the name. This will need to be done before queries are executed, as the library uses this field for ID queries. See the [Setting Up instructions][setup] for details on this new configuration parameter. + + +[GitHub release page]: https://github.com/bit-badger/BitBadger.Npgsql.Documents +[setup]: ../getting-started.md#configuring-document-ids "Getting Started • BitBadger.Documents" diff --git a/docs/upgrade/v3.md b/docs/upgrade/v3.md new file mode 100644 index 0000000..8622ebd --- /dev/null +++ b/docs/upgrade/v3.md @@ -0,0 +1,11 @@ +# Upgrade from v2 to v3 + +The biggest change with this release is that `BitBadger.Npgsql.Documents` became `BitBadger.Documents`, a set of libraries providing the same API over both PostgreSQL and SQLite (provided the underlying database supports it). Existing PostgreSQL users should have a smooth transition. + +* Drop `Npgsql` from namespace (`BitBadger.Npgsql.Documents` becomes `BitBadger.Documents`) +* Add implementation (PostgreSQL namespace is `BitBadger.Documents.Postgres`, SQLite is `BitBadger.Documents.Sqlite`) +* Both C# and F# idiomatic functions will be visible when those namespaces are `import`ed or `open`ed +* There is a `Field` constructor for creating field conditions (though look at [v4][]'s changes here as well) + + +[v4]: ./v4.md#op-type-removal "Upgrade from v3 to v4 • BitBadger.Documents" diff --git a/docs/upgrade/v4.md b/docs/upgrade/v4.md new file mode 100644 index 0000000..ef0660d --- /dev/null +++ b/docs/upgrade/v4.md @@ -0,0 +1,35 @@ +# Upgrade from v3 to v4 + +## The Quick Version + +- Add `BitBadger.Documents.[Postgres|Sqlite].Compat` to your list of `using` (C#) or `open` (F#) statements. 
This namespace has deprecated versions of the methods/functions that were removed in v4. These generate warnings, rather than the "I don't know what this is" compiler errors. +- If your code referenced `Query.[Action].[ById|ByField|etc]`, the sides of the query on each side of the `WHERE` clause are now separate. A query to patch a document by its ID would go from `Query.Patch.ById(tableName)` to `Query.ById(Query.Patch(tableName))`. These functions may also require more parameters; keep reading for details on that. +- Custom queries had to be used when querying more than one field, or when the results in the database needed to be ordered. v4 provides solutions for both of these within the library itself. + +## `ByField` to `ByFields` and PostgreSQL Numbers + +All methods/functions that ended with `ByField` now end with `ByFields`, and take a `FieldMatch` case (`Any` equates to `OR`, `All` equates to `AND`) and sequence of `Field` objects. These `Field`s need to have their values as well, because the PostgreSQL library will now cast the field from the document to numeric and bind the parameter as-is. + +That is an action-packed paragraph; these changes have several ripple effects throughout the library: +- Queries like `Query.Find.ByField` would need the full collection of fields to generate the SQL. Instead, `Query.ByFields` takes a "first-half" statement as its first parameter, then the field match and parameters as its next two. +- `Field` instances in version 3 needed to have a parameter name, which was specified externally to the object itself. In version 4, `ParameterName` is an optional member of the `Field` object, and the library will generate parameter names if it is missing. In both C# and F#, the `.WithParameterName(string)` method can be chained to the `Field.[OP]` call to specify a name, and F# users can also use the language's `with` keyword (`{ Field.EQ "TheField" "value" with ParameterName = Some "@theField" }`). 
+ +## `Op` Type Removal + +The `Op` type has been replaced with a `Comparison` type which captures both the type of comparison and the object of the comparison in one type. This is considered an internal implementation detail, as that type was not intended for use outside the library; however, it was `public`, so its removal warrants at least a mention. + +Additionally, the addition of `In` and `InArray` field comparisons drove a change to the `Field` type's static creation functions. These now have the comparison spelled out, as opposed to the two-to-three character abbreviations. (These abbreviated functions still exist as aliases, so this change will not result in compile errors.) The functions to create fields are: + +| Old | New | +|:-----:|-----------------------| +| `EQ` | `Equal` | +| `GT` | `Greater` | +| `GE` | `GreaterOrEqual` | +| `LT` | `Less` | +| `LE` | `LessOrEqual` | +| `NE` | `NotEqual` | +| `BT` | `Between` | +| `IN` | `In` _(since v4 rc1)_ | +| -- | `InArray` _(v4 rc4)_ | +| `EX` | `Exists` | +| `NEX` | `NotExists` | diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 0000000..22ca446 Binary files /dev/null and b/favicon.ico differ diff --git a/index.md b/index.md new file mode 100644 index 0000000..0f7bc1c --- /dev/null +++ b/index.md @@ -0,0 +1,93 @@ +--- +_layout: landing +title: Welcome! +--- + +BitBadger.Documents provides a lightweight document-style interface over [PostgreSQL][]'s and [SQLite][]'s JSON storage capabilities, with first-class support for both C# and F# programs. _(It is developed by the community; it is not officially affiliated with either project.)_ + +> [!TIP] +> Expecting `BitBadger.Npgsql.Documents`? This library replaced it as of v3. 
+ +## Installing + +### PostgreSQL [![Nuget (with prereleases)][pkg-shield-pgsql]][pkg-link-pgsql] + +```shell +dotnet add package BitBadger.Documents.Postgres +``` + +### SQLite [![Nuget (with prereleases)][pkg-shield-sqlite]][pkg-link-sqlite] + +```shell +dotnet add package BitBadger.Documents.Sqlite +``` + +## Using + +- **[Getting Started][]** provides an overview of the libraries' functions, how to provide connection details, and how to ensure required tables and indexes exist. +- **[Basic Usage][]** details document-level retrieval, persistence, and deletion. +- **[Advanced Usage][]** demonstrates how to use the building blocks provided by this library to write slightly-more complex queries. + +## Upgrading Major Versions + +* [v3 to v4][v3v4] ([Release][v4rel]) - Multiple field queries, ordering support, and automatic IDs +* [v2 to v3][v2v3] ([Release][v3rel]; upgrade from `BitBadger.Npgsql.Documents`) - Namespace / project change +* [v1 to v2][v1v2] ([Release][v2rel]) - Data storage format change + +## Why Documents? + +Document databases usually store JSON objects (as their "documents") to provide schemaless persistence of data; they also provide fault-tolerant ways to query that possibly-unstructured data. [MongoDB][] was the pioneer and is the leader in this space, but there are several who provide their own take on it, and their own programming API to come along with it. They also usually have some sort of clustering, replication, and sharding solution that allows them to be scaled out (horizontally) to handle a large amount of traffic. + +As a mature relational database, PostgreSQL has a long history of robust data access from the .NET environment; Npgsql is actively developed, and provides both ADO.NET and EF Core APIs. PostgreSQL also has well-established, battle-tested horizontal scaling options. Additionally, the [Npgsql.FSharp][] project provides a functional API over Npgsql's ADO.NET data access. 
These three factors make PostgreSQL an excellent choice for document storage, and its relational nature can help in areas where traditional document databases become more complex. + +SQLite is another mature relational database implemented as a single file, with its access run in-process with the calling application. It works very nicely on its own, with caching and write-ahead logging options; a companion project called [Litestream][] allows these files to be continuously streamed elsewhere, providing point-in-time recovery capabilities one would expect from a relational database. Microsoft provides ADO.NET (and EF Core) drivers for SQLite as part of .NET. These combine to make SQLite a compelling choice, and the hybrid relational/document model allows users to select the model of data that fits their model the best. + +In both cases, the document access functions provided by this library are dead-simple. For more complex queries, it also provides the building blocks to construct these with minimal code. + +## Why Not [something else]? + +We are blessed to live in a time where there are a lot of good data storage options that are more than efficient enough for the majority of use cases. Rather than speaking ill of other projects, here is the vision of the benefits these libraries aim to provide: + +### PostgreSQL + +PostgreSQL is the most popular non-WordPress database for good reason. + +- **Quality** - PostgreSQL's reputation is one of a rock-solid, well-maintained, and continually evolving database. +- **Availability** - Nearly every cloud database provider offers PostgreSQL, and for custom servers, it is a package install away from being up and running. +- **Efficiency** - PostgreSQL is very efficient, and its indexing of JSONB allows for quick access via any field in a document. 
+- **Maintainability** - The terms "separation of concerns" and "locality of behavior" often compete within a code base, and separation of concerns often wins out; cluttering your logic with SQL can be less than optimal. Using this library, though, it may separate the concerns enough that the calls can be placed directly in the regular logic, providing one fewer place that must be looked up when tracing through the code. +- **Simplicity** - SQL is a familiar language; even when writing manual queries against the data store created by this library, everything one knows about SQL applies, with [a few operators added][json-ops]. +- **Reliability** - The library has a full suite of tests against both the C# and F# APIs, [run against every supported PostgreSQL version][tests] to ensure the functionality provided is what is advertised. + +### SQLite + +The [SQLite "About" page][sqlite-about] has a short description of the project and its strengths. Simplicity, flexibility, and a large install base speak for themselves. A lot of people believe they will need a lot of features offered by server-based relational databases, and live with that complexity even when the project is small. A smarter move may be to build with SQLite; if the need arises for something more, the project is very likely a success! + +Many of the benefits listed for PostgreSQL apply here as well, including its test coverage, but SQLite removes the requirement to run it as a server! + +## Support + +Issues can be filed on the project's GitHub repository. 
+ + +[PostgreSQL]: https://www.postgresql.org/ "PostgreSQL" +[SQLite]: https://sqlite.org/ "SQLite" +[pkg-shield-pgsql]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Postgres +[pkg-link-pgsql]: https://www.nuget.org/packages/BitBadger.Documents.Postgres/ "BitBadger.Documents.Postgres • NuGet" +[pkg-shield-sqlite]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Sqlite +[pkg-link-sqlite]: https://www.nuget.org/packages/BitBadger.Documents.Sqlite/ "BitBadger.Documents.Sqlite • NuGet" +[Getting Started]: ./docs/getting-started.md "Getting Started • BitBadger.Documents" +[Basic Usage]: ./docs/basic-usage.md "Basic Usage • BitBadger.Documents" +[Advanced Usage]: ./docs/advanced/index.md "Advanced Usage • BitBadger.Documents" +[v3v4]: ./docs/upgrade/v4.md "Upgrade from v3 to v4 • BitBadger.Documents" +[v4rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v4 "Version 4 • Releases • BitBadger.Documents • Bit Badger Solutions Git" +[v2v3]: ./docs/upgrade/v3.md "Upgrade from v2 to v3 • BitBadger.Documents" +[v3rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v3 "Version 3 • Releases • BitBadger.Documents • Bit Badger Solutions Git" +[v1v2]: ./docs/upgrade/v2.md "Upgrade from v1 to v2 • BitBadger.Documents" +[v2rel]: https://github.com/bit-badger/BitBadger.Npgsql.Documents/releases/tag/v2 "Version 2 • Releases • BitBadger.Npgsql.Documents • GitHub" +[MongoDB]: https://www.mongodb.com/ "MongoDB" +[Npgsql.FSharp]: https://zaid-ajaj.github.io/Npgsql.FSharp/#/ "Npgsql.FSharp" +[Litestream]: https://litestream.io/ "Litestream" +[sqlite-about]: https://sqlite.org/about.html "About • SQLite" +[json-ops]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-JSON-OP-TABLE "JSON Functions and Operators • Documentation • PostgreSQL" +[tests]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases "Releases • BitBadger.Documents • Bit Badger Solutions Git" diff --git 
a/src/Common/Library.fs b/src/Common/Library.fs index 8bae732..78c517a 100644 --- a/src/Common/Library.fs +++ b/src/Common/Library.fs @@ -1,44 +1,45 @@ namespace BitBadger.Documents open System.Security.Cryptography +open System.Text /// The types of comparisons available for JSON fields /// type Comparison = - - /// Equals (=) + + /// Equals (=) | Equal of Value: obj - - /// Greater Than (>) + + /// Greater Than (>) | Greater of Value: obj - - /// Greater Than or Equal To (>=) + + /// Greater Than or Equal To (>=) | GreaterOrEqual of Value: obj - - /// Less Than (<) + + /// Less Than (<) | Less of Value: obj - - /// Less Than or Equal To (<=) - | LessOrEqual of Value: obj - - /// Not Equal to (<>) + + /// Less Than or Equal To (<=) + | LessOrEqual of Value: obj + + /// Not Equal to (<>) | NotEqual of Value: obj - - /// Between (BETWEEN) + + /// Between (BETWEEN) | Between of Min: obj * Max: obj - - /// In (IN) + + /// In (IN) | In of Values: obj seq - - /// In Array (PostgreSQL: |?, SQLite: EXISTS / json_each / IN) + + /// In Array (PostgreSQL: |?, SQLite: EXISTS / json_each / IN) | InArray of Table: string * Values: obj seq - - /// Exists (IS NOT NULL) + + /// Exists (IS NOT NULL) | Exists - - /// Does Not Exist (IS NULL) + + /// Does Not Exist (IS NULL) | NotExists - + /// The operator SQL for this comparison member this.OpSql = match this with @@ -50,7 +51,7 @@ type Comparison = | NotEqual _ -> "<>" | Between _ -> "BETWEEN" | In _ -> "IN" - | InArray _ -> "?|" // PostgreSQL only; SQL needs a subquery for this + | InArray _ -> "?|" // PostgreSQL only; SQL needs a subquery for this | Exists -> "IS NOT NULL" | NotExists -> "IS NULL" @@ -62,120 +63,120 @@ type Dialect = | SQLite -/// The format in which an element of a JSON field should be extracted +/// The format in which an element of a JSON field should be extracted [] type FieldFormat = - + /// - /// Use ->> or #>>; extracts a text (PostgreSQL) or SQL (SQLite) value + /// Use ->> or #>>; extracts a text 
(PostgreSQL) or SQL (SQLite) value /// | AsSql - - /// Use -> or #>; extracts a JSONB (PostgreSQL) or JSON (SQLite) value + + /// Use -> or #>; extracts a JSONB (PostgreSQL) or JSON (SQLite) value | AsJson -/// Criteria for a field WHERE clause +/// Criteria for a field WHERE clause type Field = { - + /// The name of the field Name: string - + /// The comparison for the field Comparison: Comparison - + /// The name of the parameter for this field ParameterName: string option - + /// The table qualifier for this field Qualifier: string option } with - + /// Create a comparison against a field /// The name of the field against which the comparison should be applied /// The comparison for the given field - /// A new Field instance implementing the given comparison + /// A new Field instance implementing the given comparison static member Where name (comparison: Comparison) = { Name = name; Comparison = comparison; ParameterName = None; Qualifier = None } - - /// Create an equals (=) field criterion + + /// Create an equals (=) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member Equal<'T> name (value: 'T) = Field.Where name (Equal value) - - /// Create an equals (=) field criterion (alias) + + /// Create an equals (=) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member EQ<'T> name (value: 'T) = Field.Equal name value - - /// Create a greater than (>) field criterion + + /// Create a greater than (>) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member Greater<'T> name (value: 'T) = Field.Where name (Greater value) - - /// Create a greater than (>) field criterion (alias) + + /// Create a greater than (>) field criterion (alias) /// The name of the field to be compared /// The value for the 
comparison /// A field with the given comparison static member GT<'T> name (value: 'T) = Field.Greater name value - - /// Create a greater than or equal to (>=) field criterion + + /// Create a greater than or equal to (>=) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member GreaterOrEqual<'T> name (value: 'T) = Field.Where name (GreaterOrEqual value) - - /// Create a greater than or equal to (>=) field criterion (alias) + + /// Create a greater than or equal to (>=) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member GE<'T> name (value: 'T) = Field.GreaterOrEqual name value - - /// Create a less than (<) field criterion + + /// Create a less than (<) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member Less<'T> name (value: 'T) = Field.Where name (Less value) - - /// Create a less than (<) field criterion (alias) + + /// Create a less than (<) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member LT<'T> name (value: 'T) = Field.Less name value - - /// Create a less than or equal to (<=) field criterion + + /// Create a less than or equal to (<=) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member LessOrEqual<'T> name (value: 'T) = Field.Where name (LessOrEqual value) - - /// Create a less than or equal to (<=) field criterion (alias) + + /// Create a less than or equal to (<=) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member LE<'T> name (value: 'T) = Field.LessOrEqual name value - - /// Create a not equals (<>) 
field criterion + + /// Create a not equals (<>) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member NotEqual<'T> name (value: 'T) = Field.Where name (NotEqual value) - - /// Create a not equals (<>) field criterion (alias) + + /// Create a not equals (<>) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member NE<'T> name (value: 'T) = Field.NotEqual name value - + /// Create a Between field criterion /// The name of the field to be compared /// The minimum value for the comparison range @@ -183,27 +184,27 @@ type Field = { /// A field with the given comparison static member Between<'T> name (min: 'T) (max: 'T) = Field.Where name (Between(min, max)) - + /// Create a Between field criterion (alias) /// The name of the field to be compared /// The minimum value for the comparison range /// The maximum value for the comparison range /// A field with the given comparison static member BT<'T> name (min: 'T) (max: 'T) = Field.Between name min max - + /// Create an In field criterion /// The name of the field to be compared /// The values for the comparison /// A field with the given comparison static member In<'T> name (values: 'T seq) = Field.Where name (In (Seq.map box values)) - + /// Create an In field criterion (alias) /// The name of the field to be compared /// The values for the comparison /// A field with the given comparison static member IN<'T> name (values: 'T seq) = Field.In name values - + /// Create an InArray field criterion /// The name of the field to be compared /// The name of the table in which the field's documents are stored @@ -211,34 +212,34 @@ type Field = { /// A field with the given comparison static member InArray<'T> name tableName (values: 'T seq) = Field.Where name (InArray(tableName, Seq.map box values)) - - /// Create an exists (IS NOT NULL) field 
criterion + + /// Create an exists (IS NOT NULL) field criterion /// The name of the field to be compared /// A field with the given comparison static member Exists name = Field.Where name Exists - - /// Create an exists (IS NOT NULL) field criterion (alias) + + /// Create an exists (IS NOT NULL) field criterion (alias) /// The name of the field to be compared /// A field with the given comparison static member EX name = Field.Exists name - - /// Create a not exists (IS NULL) field criterion + + /// Create a not exists (IS NULL) field criterion /// The name of the field to be compared /// A field with the given comparison static member NotExists name = Field.Where name NotExists - - /// Create a not exists (IS NULL) field criterion (alias) + + /// Create a not exists (IS NULL) field criterion (alias) /// The name of the field to be compared /// A field with the given comparison static member NEX name = Field.NotExists name - - /// Transform a field name (a.b.c) to a path for the given SQL dialect + + /// Transform a field name (a.b.c) to a path for the given SQL dialect /// The name of the field in dotted format /// The SQL dialect to use when converting the name to nested path format /// Whether to reference this path as a JSON value or a SQL value - /// A string with the path required to address the nested document value + /// A string with the path required to address the nested document value static member NameToPath (name: string) dialect format = let path = if name.Contains '.' 
then @@ -254,19 +255,19 @@ type Field = { else match format with AsJson -> $"->'{name}'" | AsSql -> $"->>'{name}'" $"data{path}" - + /// Create a field with a given name, but no other properties filled /// The field name, along with any other qualifications if used in a sorting context - /// Comparison will be Equal, value will be an empty string + /// Comparison will be Equal, value will be an empty string static member Named name = Field.Where name (Equal "") - + /// Specify the name of the parameter for this field - /// The parameter name (including : or @) + /// The parameter name (including : or @) /// A field with the given parameter name specified member this.WithParameterName name = { this with ParameterName = Some name } - + /// Specify a qualifier (alias) for the table from which this field will be referenced /// The table alias for this field comparison /// A field with the given qualifier specified @@ -276,7 +277,7 @@ type Field = { /// Get the qualified path to the field /// The SQL dialect to use when converting the name to nested path format /// Whether to reference this path as a JSON value or a SQL value - /// A string with the qualified path required to address the nested document value + /// A string with the qualified path required to address the nested document value member this.Path dialect format = (this.Qualifier |> Option.map (fun q -> $"{q}.") |> Option.defaultValue "") + Field.NameToPath this.Name dialect format @@ -285,13 +286,13 @@ type Field = { /// How fields should be matched [] type FieldMatch = - - /// Any field matches (OR) + + /// Any field matches (OR) | Any - - /// All fields match (AND) + + /// All fields match (AND) | All - + /// The SQL value implementing each matching strategy override this.ToString() = match this with Any -> "OR" | All -> "AND" @@ -299,10 +300,10 @@ type FieldMatch = /// Derive parameter names (each instance wraps a counter to uniquely name anonymous fields) type ParameterName() = - + /// The counter for 
the next field value let mutable currentIdx = -1 - + /// /// Return the specified name for the parameter, or an anonymous parameter name if none is specified /// @@ -319,30 +320,30 @@ type ParameterName() = /// Automatically-generated document ID strategies [] type AutoId = - + /// No automatic IDs will be generated | Disabled - + /// Generate a MAX-plus-1 numeric value for documents | Number - - /// Generate a GUID for each document (as a lowercase, no-dashes, 32-character string) + + /// Generate a GUID for each document (as a lowercase, no-dashes, 32-character string) | Guid - + /// Generate a random string of hexadecimal characters for each document | RandomString with - /// Generate a GUID string - /// A GUID string + /// Generate a GUID string + /// A GUID string static member GenerateGuid() = System.Guid.NewGuid().ToString "N" - + /// Generate a string of random hexadecimal characters /// The number of characters to generate /// A string of the given length with random hexadecimal characters static member GenerateRandomString(length: int) = RandomNumberGenerator.GetHexString(length, lowercase = true) - + /// Does the given document need an automatic ID generated? 
/// The auto-ID strategy currently in use /// The document being inserted @@ -387,26 +388,26 @@ with /// The required document serialization implementation type IDocumentSerializer = - + /// Serialize an object to a JSON string abstract Serialize<'T> : 'T -> string - + /// Deserialize a JSON string into an object abstract Deserialize<'T> : string -> 'T /// Document serializer defaults module DocumentSerializer = - + open System.Text.Json open System.Text.Json.Serialization - + /// The default JSON serializer options to use with the stock serializer let private jsonDefaultOpts = let o = JsonSerializerOptions() o.Converters.Add(JsonFSharpConverter()) o - + /// The default JSON serializer [] let ``default`` = @@ -424,7 +425,7 @@ module Configuration = /// The serializer to use for document manipulation let mutable private serializerValue = DocumentSerializer.``default`` - + /// Register a serializer to use for translating documents to domain types /// The serializer to use when manipulating documents [] @@ -436,46 +437,46 @@ module Configuration = [] let serializer () = serializerValue - + /// The serialized name of the ID field for documents let mutable private idFieldValue = "Id" - + /// Specify the name of the ID field for documents /// The name of the ID field for documents [] let useIdField it = idFieldValue <- it - + /// Retrieve the currently configured ID field for documents /// The currently configured ID field [] let idField () = idFieldValue - + /// The automatic ID strategy used by the library let mutable private autoIdValue = Disabled - + /// Specify the automatic ID generation strategy used by the library /// The automatic ID generation strategy to use [] let useAutoIdStrategy it = autoIdValue <- it - + /// Retrieve the currently configured automatic ID generation strategy /// The current automatic ID generation strategy [] let autoIdStrategy () = autoIdValue - + /// The length of automatically generated random strings let mutable private 
idStringLengthValue = 16 - + /// Specify the length of automatically generated random strings /// The length of automatically generated random strings [] let useIdStringLength length = idStringLengthValue <- length - + /// Retrieve the currently configured length of automatically generated random strings /// The current length of automatically generated random strings [] @@ -486,31 +487,31 @@ module Configuration = /// Query construction functions [] module Query = - - /// Combine a query (SELECT, UPDATE, etc.) and a WHERE clause + + /// Combine a query (SELECT, UPDATE, etc.) and a WHERE clause /// The first part of the statement - /// The WHERE clause for the statement - /// The two parts of the query combined with WHERE + /// The WHERE clause for the statement + /// The two parts of the query combined with WHERE [] let statementWhere statement where = $"%s{statement} WHERE %s{where}" - + /// Queries to define tables and indexes module Definition = - + /// SQL statement to create a document table /// The name of the table to create (may include schema) - /// The type of data for the column (JSON, JSONB, etc.) + /// The type of data for the column (JSON, JSONB, etc.) /// A query to create a document table [] let ensureTableFor name dataType = $"CREATE TABLE IF NOT EXISTS %s{name} (data %s{dataType} NOT NULL)" - + /// Split a schema and table name let private splitSchemaAndTable (tableName: string) = let parts = tableName.Split '.' 
if Array.length parts = 1 then "", tableName else parts[0], parts[1] - + /// SQL statement to create an index on one or more fields in a JSON document /// The table on which an index should be created (may include schema) /// The name of the index to be created @@ -537,7 +538,7 @@ module Query = [] let ensureKey tableName dialect = (ensureIndexOn tableName "key" [ Configuration.idField () ] dialect).Replace("INDEX", "UNIQUE INDEX") - + /// Query to insert a document /// The table into which to insert (may include schema) /// A query to insert a document @@ -554,48 +555,48 @@ module Query = let save tableName = sprintf "INSERT INTO %s VALUES (@data) ON CONFLICT ((data->>'%s')) DO UPDATE SET data = EXCLUDED.data" - tableName (Configuration.idField ()) - + tableName (Configuration.idField ()) + /// Query to count documents in a table /// The table in which to count documents (may include schema) /// A query to count documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let count tableName = $"SELECT COUNT(*) AS it FROM %s{tableName}" - + /// Query to check for document existence in a table /// The table in which existence should be checked (may include schema) - /// The WHERE clause with the existence criteria + /// The WHERE clause with the existence criteria /// A query to check document existence [] let exists tableName where = $"SELECT EXISTS (SELECT 1 FROM %s{tableName} WHERE %s{where}) AS it" - + /// Query to select documents from a table /// The table from which documents should be found (may include schema) /// A query to retrieve documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let find tableName = $"SELECT data FROM %s{tableName}" - + /// Query to update (replace) a document /// The table in which documents should be replaced (may include schema) /// A query to update documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let update tableName = $"UPDATE 
%s{tableName} SET data = @data" - + /// Query to delete documents from a table /// The table in which documents should be deleted (may include schema) /// A query to delete documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let delete tableName = $"DELETE FROM %s{tableName}" - + /// Create a SELECT clause to retrieve the document data from the given table /// The table from which documents should be found (may include schema) /// A query to retrieve documents @@ -603,11 +604,11 @@ module Query = [] let selectFromTable tableName = find tableName - - /// Create an ORDER BY clause for the given fields + + /// Create an ORDER BY clause for the given fields /// One or more fields by which to order /// The SQL dialect for the generated clause - /// An ORDER BY clause for the given fields + /// An ORDER BY clause for the given fields [] let orderBy fields dialect = if Seq.isEmpty fields then "" @@ -631,3 +632,49 @@ module Query = |> function path -> path + defaultArg direction "") |> String.concat ", " |> function it -> $" ORDER BY {it}" + + +#nowarn "FS3511" // "let rec" is not statically compilable + +open System.IO.Pipelines + +/// Functions that manipulate PipeWriters +[] +module PipeWriter = + + /// Write a UTF-8 string to this pipe + /// The PipeWriter to which the string should be written + /// The string to be written to the pipe + /// true if the pipe is still open, false if not + [] + let writeString (writer: PipeWriter) (text: string) = backgroundTask { + try + let! writeResult = writer.WriteAsync(Encoding.UTF8.GetBytes text) + return not writeResult.IsCompleted + with :? 
System.ObjectDisposedException -> return false + } + + /// Write an array of strings, abandoning the sequence if the pipe is closed + /// The PipeWriter to which the strings should be written + /// The strings to be written + /// true if the pipe is still open, false if not + [] + let writeStrings writer items = backgroundTask { + let theItems = Seq.cache items + let rec writeNext idx = backgroundTask { + match theItems |> Seq.tryItem idx with + | Some item -> + if idx > 0 then + let! _ = writeString writer "," + () + match! writeString writer item with + | true -> return! writeNext (idx + 1) + | false -> return false + | None -> return true + } + let! _ = writeString writer "[" + let! isCleanFinish = writeNext 0 + if isCleanFinish then + let! _ = writeString writer "]" + () + } diff --git a/src/Common/README.md b/src/Common/README.md index c0107b8..ba3aa52 100644 --- a/src/Common/README.md +++ b/src/Common/README.md @@ -8,11 +8,11 @@ This package provides common definitions and functionality for `BitBadger.Docume - Select, insert, update, save (upsert), delete, count, and check existence of documents, and create tables and indexes for these documents - Automatically generate IDs for documents (numeric IDs, GUIDs, or random strings) -- Addresses documents via ID and via comparison on any field (for PostgreSQL, also via equality on any property by using JSON containment, or via condition on any property using JSON Path queries) -- Accesses documents as your domain models (POCOs) -- Uses `Task`-based async for all data access functions -- Uses building blocks for more complex queries +- Address documents via ID and via comparison on any field (for PostgreSQL, also via equality on any property by using JSON containment, or via condition on any property using JSON Path queries) +- Access documents as your domain models (POCOs), as JSON strings, or as JSON written directly to a `PipeWriter` +- Use `Task`-based async for all data access functions +- Use building blocks 
for more complex queries ## Getting Started -Install the library of your choice and follow its README; also, the [project site](https://bitbadger.solutions/open-source/relational-documents/) has complete documentation. +Install the library of your choice and follow its README; also, the [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has complete documentation. diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 932d9dd..68d9ff6 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -3,16 +3,15 @@ net8.0;net9.0 embedded true - 4.0.1.0 - 4.0.1.0 - 4.0.1 - From v4.0: Add XML documention (IDE support) -From v3.1: See 4.0 release for breaking changes and compatibility + 4.1.0.0 + 4.1.0.0 + 4.1.0 + Add JSON retrieval and pipe-writing functions; update project URL to site with public API docs danieljsummers Bit Badger Solutions README.md icon.png - https://bitbadger.solutions/open-source/relational-documents/ + https://relationaldocs.bitbadger.solutions/dotnet/ false https://git.bitbadger.solutions/bit-badger/BitBadger.Documents Git diff --git a/src/Postgres/Extensions.fs b/src/Postgres/Extensions.fs index 568b51b..ebd9229 100644 --- a/src/Postgres/Extensions.fs +++ b/src/Postgres/Extensions.fs @@ -18,14 +18,38 @@ module Extensions = member conn.customList<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = Custom.list<'TDoc> query parameters mapFunc (Sql.existingConnection conn) + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + member conn.customJsonArray query parameters mapFunc = + Custom.jsonArray query parameters mapFunc (Sql.existingConnection conn) + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to 
which the results should be written + /// The mapping function to extract the document + member conn.writeCustomJsonArray query parameters writer mapFunc = + Custom.writeJsonArray query parameters writer mapFunc (Sql.existingConnection conn) + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found member conn.customSingle<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = Custom.single<'TDoc> query parameters mapFunc (Sql.existingConnection conn) + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + member conn.customJsonSingle query parameters mapFunc = + Custom.jsonSingle query parameters mapFunc (Sql.existingConnection conn) + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query @@ -78,7 +102,7 @@ module Extensions = member conn.countAll tableName = Count.all tableName (Sql.existingConnection conn) - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) 
/// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -86,14 +110,14 @@ module Extensions = member conn.countByFields tableName howMatched fields = Count.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The table in which documents should be counted (may include schema) /// The document to match with the containment query /// The count of the documents in the table member conn.countByContains tableName criteria = Count.byContains tableName criteria (Sql.existingConnection conn) - /// Count matching documents using a JSON Path match query (@?) + /// Count matching documents using a JSON Path match query (@?) /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched /// The count of the documents in the table @@ -107,7 +131,7 @@ module Extensions = member conn.existsById tableName docId = Exists.byId tableName docId (Sql.existingConnection conn) - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
/// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -115,14 +139,14 @@ module Extensions = member conn.existsByFields tableName howMatched fields = Exists.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The table in which existence should be checked (may include schema) /// The document to match with the containment query /// True if any matching documents exist, false if not member conn.existsByContains tableName criteria = Exists.byContains tableName criteria (Sql.existingConnection conn) - /// Determine if a document exists using a JSON Path match query (@?) + /// Determine if a document exists using a JSON Path match query (@?) /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched /// True if any matching documents exist, false if not @@ -145,11 +169,11 @@ module Extensions = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise member conn.findById<'TKey, 'TDoc> tableName docId = Find.byId<'TKey, 'TDoc> tableName docId (Sql.existingConnection conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
/// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -158,8 +182,8 @@ module Extensions = Find.byFields<'TDoc> tableName howMatched fields (Sql.existingConnection conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// the document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -170,7 +194,7 @@ module Extensions = Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (Sql.existingConnection conn) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -178,7 +202,7 @@ module Extensions = Find.byContains<'TDoc> tableName criteria (Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the /// document /// /// The table from which documents should be retrieved (may include schema) @@ -188,7 +212,7 @@ module Extensions = member conn.findByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields = Find.byContainsOrdered<'TDoc> tableName criteria orderFields (Sql.existingConnection conn) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) 
/// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -196,8 +220,7 @@ module Extensions = Find.byJsonPath<'TDoc> tableName jsonPath (Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -206,69 +229,356 @@ module Extensions = member conn.findByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = Find.byJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByFields<'TDoc> tableName howMatched fields = Find.firstByFields<'TDoc> tableName howMatched fields (Sql.existingConnection conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the - /// given fields in the document + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (Sql.existingConnection conn) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByContains<'TDoc> tableName (criteria: obj) = Find.firstByContains<'TDoc> tableName criteria (Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields - /// in the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in + /// the document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields = Find.firstByContainsOrdered<'TDoc> tableName criteria orderFields (Sql.existingConnection conn) - /// Retrieve the 
first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByJsonPath<'TDoc> tableName jsonPath = Find.firstByJsonPath<'TDoc> tableName jsonPath (Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn) + /// Retrieve all documents in the given table as a JSON array + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table as a JSON array + member conn.jsonAll tableName = + Json.all tableName (Sql.existingConnection conn) + + /// Write all documents in the given table to the given PipeWriter + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + member conn.writeJsonAll tableName writer = + Json.writeAll tableName writer (Sql.existingConnection conn) + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The 
table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table as a JSON array, ordered by the given fields + member conn.jsonAllOrdered tableName orderFields = + Json.allOrdered tableName orderFields (Sql.existingConnection conn) + + /// + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + member conn.writeJsonAllOrdered tableName writer orderFields = + Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn) + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + member conn.jsonById<'TKey> tableName (docId: 'TKey) = + Json.byId tableName docId (Sql.existingConnection conn) + + /// Write a JSON document to the given PipeWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) = + Json.writeById tableName writer docId (Sql.existingConnection conn) + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
+ /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + member conn.jsonByFields tableName howMatched fields = + Json.byFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, + /// etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonByFields tableName writer howMatched fields = + Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + member conn.jsonByFieldsOrdered tableName howMatched queryFields orderFields = + Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, + /// etc.) 
ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// All JSON documents matching the given containment query + member conn.jsonByContains tableName (criteria: obj) = + Json.byContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + member conn.writeJsonByContains tableName writer (criteria: obj) = + Json.writeByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// All documents matching the given containment query, ordered by the given fields + member conn.jsonByContainsOrdered tableName (criteria: obj) orderFields = + Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter 
matching a JSON containment query (@>) ordered + /// by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + member conn.writeJsonByContainsOrdered tableName writer (criteria: obj) orderFields = + Json.writeByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON Path match query (@?) + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// All JSON documents matching the given JSON Path expression + member conn.jsonByJsonPath tableName jsonPath = + Json.byJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + member conn.writeJsonByJsonPath tableName writer jsonPath = + Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + member conn.jsonByJsonPathOrdered tableName jsonPath orderFields = + Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) 
ordered by + /// the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + member conn.writeJsonByJsonPathOrdered tableName writer jsonPath orderFields = + Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByFields tableName howMatched fields = + Json.firstByFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonFirstByFields tableName writer howMatched fields = + Json.writeFirstByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the + /// given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByFieldsOrdered tableName howMatched queryFields orderFields = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeFirstByFieldsOrdered + tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByContains tableName (criteria: obj) = + Json.firstByContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query + /// (@>) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + member 
conn.writeJsonFirstByContains tableName writer (criteria: obj) = + Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByContainsOrdered tableName (criteria: obj) orderFields = + Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query + /// (@>) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + member conn.writeJsonFirstByContainsOrdered tableName writer (criteria: obj) orderFields = + Json.writeFirstByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON Path match query (@?) + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByJsonPath tableName jsonPath = + Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) 
+ /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + member conn.writeJsonFirstByJsonPath tableName writer jsonPath = + Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields + /// in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByJsonPathOrdered tableName jsonPath orderFields = + Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + member conn.writeJsonFirstByJsonPathOrdered tableName writer jsonPath orderFields = + Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + /// Update (replace) an entire document by its ID /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) @@ -294,7 +604,7 @@ module Extensions = Patch.byId tableName docId patch (Sql.existingConnection conn) /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, /// etc.) 
/// /// The table in which documents should be patched (may include schema) @@ -305,7 +615,7 @@ module Extensions = Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn) /// - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// /// The table in which documents should be patched (may include schema) /// The document to match the containment query @@ -313,7 +623,7 @@ module Extensions = member conn.patchByContains tableName (criteria: 'TCriteria) (patch: 'TPatch) = Patch.byContains tableName criteria patch (Sql.existingConnection conn) - /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document @@ -335,14 +645,14 @@ module Extensions = member conn.removeFieldsByFields tableName howMatched fields fieldNames = RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents member conn.removeFieldsByContains tableName (criteria: 'TContains) fieldNames = RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) 
/// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents @@ -355,14 +665,14 @@ module Extensions = member conn.deleteById tableName (docId: 'TKey) = Delete.byId tableName docId (Sql.existingConnection conn) - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match member conn.deleteByFields tableName howMatched fields = Delete.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The table in which documents should be deleted (may include schema) /// The document to match the containment query member conn.deleteByContains tableName (criteria: 'TContains) = @@ -381,7 +691,7 @@ open System.Runtime.CompilerServices type NpgsqlConnectionCSharpExtensions = /// Execute a query that returns a list of results - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item @@ -390,19 +700,49 @@ type NpgsqlConnectionCSharpExtensions = static member inline CustomList<'TDoc>(conn, query, parameters, mapFunc: System.Func) = Custom.List<'TDoc>(query, parameters, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns a JSON array of results + /// The NpgsqlConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + 
[] + static member inline CustomJsonArray(conn, query, parameters, mapFunc) = + Custom.JsonArray(query, parameters, mapFunc, Sql.existingConnection conn) + + /// Execute a query, writing its results to the given PipeWriter + /// The NpgsqlConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + [] + static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) = + Custom.WriteJsonArray(query, parameters, writer, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns one or no results - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found [] static member inline CustomSingle<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, query, parameters, mapFunc: System.Func) = Custom.Single<'TDoc>(query, parameters, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns one or no JSON documents + /// The NpgsqlConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + static member inline CustomJsonSingle(conn, query, parameters, mapFunc) = + Custom.JsonSingle(query, parameters, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns no results - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query [] @@ -410,7 
+750,7 @@ type NpgsqlConnectionCSharpExtensions = Custom.nonQuery query parameters (Sql.existingConnection conn) /// Execute a query that returns a scalar value - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value @@ -421,14 +761,14 @@ type NpgsqlConnectionCSharpExtensions = Custom.Scalar(query, parameters, mapFunc, Sql.existingConnection conn) /// Create a document table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table whose existence should be ensured (may include schema) [] static member inline EnsureTable(conn, name) = Definition.ensureTable name (Sql.existingConnection conn) /// Create an index on documents in the specified table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table to be indexed (may include schema) /// The type of document index to create [] @@ -436,7 +776,7 @@ type NpgsqlConnectionCSharpExtensions = Definition.ensureDocumentIndex name idxType (Sql.existingConnection conn) /// Create an index on field(s) within documents in the specified table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed @@ -445,7 +785,7 @@ type NpgsqlConnectionCSharpExtensions = Definition.ensureFieldIndex tableName indexName fields (Sql.existingConnection conn) /// Insert a new document - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table into which the document should be inserted (may include schema) /// The document to be inserted [] @@ -453,7 +793,7 @@ type NpgsqlConnectionCSharpExtensions = insert<'TDoc> tableName document 
(Sql.existingConnection conn) /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table into which the document should be saved (may include schema) /// The document to be saved [] @@ -461,15 +801,15 @@ type NpgsqlConnectionCSharpExtensions = save<'TDoc> tableName document (Sql.existingConnection conn) /// Count all documents in a table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The count of the documents in the table [] static member inline CountAll(conn, tableName) = Count.all tableName (Sql.existingConnection conn) - /// Count matching documents using JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// Count matching documents using JSON field comparisons (->> =, etc.) 
+ /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -478,8 +818,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline CountByFields(conn, tableName, howMatched, fields) = Count.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Count matching documents using a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// Count matching documents using a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The document to match with the containment query /// The count of the documents in the table @@ -487,8 +827,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline CountByContains(conn, tableName, criteria: 'TCriteria) = Count.byContains tableName criteria (Sql.existingConnection conn) - /// Count matching documents using a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// Count matching documents using a JSON Path match query (@?) 
+ /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched /// The count of the documents in the table @@ -497,7 +837,7 @@ type NpgsqlConnectionCSharpExtensions = Count.byJsonPath tableName jsonPath (Sql.existingConnection conn) /// Determine if a document exists for the given ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked /// True if a document exists, false if not @@ -505,8 +845,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline ExistsById(conn, tableName, docId) = Exists.byId tableName docId (Sql.existingConnection conn) - /// Determine if a document exists using JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
+ /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -515,8 +855,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline ExistsByFields(conn, tableName, howMatched, fields) = Exists.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Determine if a document exists using a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// Determine if a document exists using a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The document to match with the containment query /// True if any matching documents exist, false if not @@ -524,8 +864,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline ExistsByContains(conn, tableName, criteria: 'TCriteria) = Exists.byContains tableName criteria (Sql.existingConnection conn) - /// Determine if a document exists using a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// Determine if a document exists using a JSON Path match query (@?) 
+ /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched /// True if any matching documents exist, false if not @@ -534,7 +874,7 @@ type NpgsqlConnectionCSharpExtensions = Exists.byJsonPath tableName jsonPath (Sql.existingConnection conn) /// Retrieve all documents in the given table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// All documents from the given table [] @@ -542,7 +882,7 @@ type NpgsqlConnectionCSharpExtensions = Find.All<'TDoc>(tableName, Sql.existingConnection conn) /// Retrieve all documents in the given table ordered by the given fields in the document - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered /// All documents from the given table, ordered by the given fields @@ -551,16 +891,16 @@ type NpgsqlConnectionCSharpExtensions = Find.AllOrdered<'TDoc>(tableName, orderFields, Sql.existingConnection conn) /// Retrieve a document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise [] static member inline FindById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, docId: 'TKey) = Find.ById<'TKey, 'TDoc>(tableName, docId, Sql.existingConnection conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
+ /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -570,10 +910,10 @@ type NpgsqlConnectionCSharpExtensions = Find.ByFields<'TDoc>(tableName, howMatched, fields, Sql.existingConnection conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -584,8 +924,8 @@ type NpgsqlConnectionCSharpExtensions = Find.ByFieldsOrdered<'TDoc>( tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn) - /// Retrieve documents matching a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// Retrieve documents matching a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -594,10 +934,9 @@ type NpgsqlConnectionCSharpExtensions = Find.ByContains<'TDoc>(tableName, criteria, Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may 
include schema) /// The document to match with the containment query /// Fields by which the results should be ordered @@ -606,8 +945,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline FindByContainsOrdered<'TDoc>(conn, tableName, criteria: obj, orderFields) = Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn) - /// Retrieve documents matching a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// Retrieve documents matching a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -616,9 +955,9 @@ type NpgsqlConnectionCSharpExtensions = Find.ByJsonPath<'TDoc>(tableName, jsonPath, Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered @@ -627,82 +966,424 @@ type NpgsqlConnectionCSharpExtensions = static member inline FindByJsonPathOrdered<'TDoc>(conn, tableName, jsonPath, orderFields) = Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
+ /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, fields) = Find.FirstByFields<'TDoc>(tableName, howMatched, fields, Sql.existingConnection conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, queryFields, orderFields) = Find.FirstByFieldsOrdered<'TDoc>( tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn) - /// Retrieve the first document matching a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// Retrieve the first document matching a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The first document, or null if not found + /// The first document, or null if not found [] static member 
inline FindFirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, criteria: obj) = Find.FirstByContains<'TDoc>(tableName, criteria, Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, criteria: obj, orderFields) = Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn) - /// Retrieve the first document matching a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// Retrieve the first document matching a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, jsonPath) = Find.FirstByJsonPath<'TDoc>(tableName, jsonPath, Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) 
ordered by the given fields in the /// document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, jsonPath, orderFields) = Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn) + /// Retrieve all documents in the given table as a JSON array + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table as a JSON array + [] + static member inline JsonAll(conn, tableName) = + Json.all tableName (Sql.existingConnection conn) + + /// Write all documents in the given table to the given PipeWriter + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + [] + static member inline WriteJsonAll(conn, tableName, writer) = + Json.writeAll tableName writer (Sql.existingConnection conn) + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table as a JSON array, ordered by the given fields + [] + static member inline JsonAllOrdered(conn, tableName, orderFields) = + Json.allOrdered tableName orderFields (Sql.existingConnection conn) 
+ + /// + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the + /// document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + [] + static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) = + Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn) + + /// Retrieve a JSON document by its ID + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + static member inline JsonById<'TKey>(conn, tableName, docId: 'TKey) = + Json.byId tableName docId (Sql.existingConnection conn) + + /// Write a JSON document to the given PipeWriter by its ID + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + [] + static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId) = + Json.writeById tableName writer docId (Sql.existingConnection conn) + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
+ /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + static member inline JsonByFields(conn, tableName, howMatched, fields) = + Json.byFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonByFields(conn, tableName, writer, howMatched, fields) = + Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + static member inline JsonByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) 
+ /// ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// All JSON documents matching the given containment query + [] + static member inline JsonByContains(conn, tableName, criteria: obj) = + Json.byContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + [] + static member inline WriteJsonByContains(conn, tableName, writer, criteria: obj) = + Json.writeByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// All documents 
matching the given containment query, ordered by the given fields + [] + static member inline JsonByContainsOrdered(conn, tableName, criteria: obj, orderFields) = + Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by + /// the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByContainsOrdered(conn, tableName, writer, criteria: obj, orderFields) = + Json.writeByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// All JSON documents matching the given JSON Path expression + [] + static member inline JsonByJsonPath(conn, tableName, jsonPath) = + Json.byJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + [] + static member inline WriteJsonByJsonPath(conn, tableName, writer, jsonPath) = + Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) 
ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + [] + static member inline JsonByJsonPathOrdered(conn, tableName, jsonPath, orderFields) = + Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by the + /// given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) = + Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFields(conn, tableName, howMatched, fields) = + Json.firstByFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
+ /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonFirstByFields(conn, tableName, writer, howMatched, fields) = + Json.writeFirstByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the + /// given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeFirstByFieldsOrdered + tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByContains(conn, tableName, criteria: obj) = + Json.firstByContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + [] + static member inline WriteJsonFirstByContains(conn, tableName, writer, criteria: obj) = + Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in + /// the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query 
+ /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByContainsOrdered(conn, tableName, criteria: obj, orderFields) = + Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByContainsOrdered(conn, tableName, writer, criteria: obj, orderFields) = + Json.writeFirstByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByJsonPath(conn, tableName, jsonPath) = + Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) 
+ /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + [] + static member inline WriteJsonFirstByJsonPath(conn, tableName, writer, jsonPath) = + Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the + /// document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByJsonPathOrdered(conn, tableName, jsonPath, orderFields) = + Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) 
+ /// ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) = + Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + /// Update (replace) an entire document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document @@ -713,7 +1394,7 @@ type NpgsqlConnectionCSharpExtensions = /// /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document @@ -722,7 +1403,7 @@ type NpgsqlConnectionCSharpExtensions = Update.ByFunc(tableName, idFunc, document, Sql.existingConnection conn) /// Patch a document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document @@ -731,9 +1412,9 @@ type NpgsqlConnectionCSharpExtensions = Patch.byId tableName docId patch (Sql.existingConnection conn) /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
+ /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -742,8 +1423,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline PatchByFields(conn, tableName, howMatched, fields, patch: 'TPatch) = Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn) - /// Patch documents using a JSON containment query in the WHERE clause (@>) - /// The NpgsqlConnection on which to run the query + /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// The document to match the containment query /// The partial document to patch the existing document @@ -751,8 +1432,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline PatchByContains(conn, tableName, criteria: 'TCriteria, patch: 'TPatch) = Patch.byContains tableName criteria patch (Sql.existingConnection conn) - /// Patch documents using a JSON Path match query in the WHERE clause (@?) - /// The NpgsqlConnection on which to run the query + /// Patch documents using a JSON Path match query in the WHERE clause (@?) 
+ /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document @@ -761,7 +1442,7 @@ type NpgsqlConnectionCSharpExtensions = Patch.byJsonPath tableName jsonPath patch (Sql.existingConnection conn) /// Remove fields from a document by the document's ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document @@ -770,7 +1451,7 @@ type NpgsqlConnectionCSharpExtensions = RemoveFields.byId tableName docId fieldNames (Sql.existingConnection conn) /// Remove fields from documents via a comparison on JSON fields in the document - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -779,8 +1460,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline RemoveFieldsByFields(conn, tableName, howMatched, fields, fieldNames) = RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// Remove fields from documents via a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents @@ -788,8 +1469,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline RemoveFieldsByContains(conn, tableName, criteria: 'TContains, fieldNames) = 
RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// Remove fields from documents via a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents @@ -798,15 +1479,15 @@ type NpgsqlConnectionCSharpExtensions = RemoveFields.byJsonPath tableName jsonPath fieldNames (Sql.existingConnection conn) /// Delete a document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete [] static member inline DeleteById(conn, tableName, docId: 'TKey) = Delete.byId tableName docId (Sql.existingConnection conn) - /// Delete documents by matching a JSON field comparison query (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
+ /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -814,8 +1495,8 @@ type NpgsqlConnectionCSharpExtensions = static member inline DeleteByFields(conn, tableName, howMatched, fields) = Delete.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Delete documents by matching a JSON contains query (@>) - /// The NpgsqlConnection on which to run the query + /// Delete documents by matching a JSON contains query (@>) + /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// The document to match the containment query [] @@ -823,7 +1504,7 @@ type NpgsqlConnectionCSharpExtensions = Delete.byContains tableName criteria (Sql.existingConnection conn) /// Delete documents by matching a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// The JSON Path expression to match [] diff --git a/src/Postgres/Functions.fs b/src/Postgres/Functions.fs index fb189e9..cf4d9f6 100644 --- a/src/Postgres/Functions.fs +++ b/src/Postgres/Functions.fs @@ -21,11 +21,45 @@ module Custom = let List<'TDoc>(query, parameters, mapFunc: System.Func) = WithProps.Custom.List<'TDoc>(query, parameters, mapFunc, fromDataSource ()) + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + [] + let jsonArray query parameters mapFunc = + WithProps.Custom.jsonArray query parameters mapFunc (fromDataSource ()) + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to 
use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc) = + WithProps.Custom.JsonArray(query, parameters, mapFunc, fromDataSource ()) + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + [] + let writeJsonArray query parameters writer mapFunc = + WithProps.Custom.writeJsonArray query parameters writer mapFunc (fromDataSource ()) + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + let WriteJsonArray(query, parameters, writer, mapFunc) = + WithProps.Custom.WriteJsonArray(query, parameters, writer, mapFunc, fromDataSource ()) + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found [] let single<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = WithProps.Custom.single<'TDoc> query parameters mapFunc (fromDataSource ()) @@ -34,11 +68,28 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( query, parameters, mapFunc: System.Func) = WithProps.Custom.Single<'TDoc>(query, parameters, mapFunc, fromDataSource ()) + /// Execute a 
query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc = + WithProps.Custom.jsonSingle query parameters mapFunc (fromDataSource ()) + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc) = + WithProps.Custom.JsonSingle(query, parameters, mapFunc, fromDataSource ()) + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query @@ -120,7 +171,7 @@ module Count = let all tableName = WithProps.Count.all tableName (fromDataSource ()) - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -129,7 +180,7 @@ module Count = let byFields tableName howMatched fields = WithProps.Count.byFields tableName howMatched fields (fromDataSource ()) - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The table in which documents should be counted (may include schema) /// The document to match with the containment query /// The count of the documents in the table @@ -137,7 +188,7 @@ module Count = let byContains tableName criteria = WithProps.Count.byContains tableName criteria (fromDataSource ()) - /// Count matching documents using a JSON Path match query (@?) 
+ /// Count matching documents using a JSON Path match query (@?) /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched /// The count of the documents in the table @@ -158,7 +209,7 @@ module Exists = let byId tableName docId = WithProps.Exists.byId tableName docId (fromDataSource ()) - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -167,7 +218,7 @@ module Exists = let byFields tableName howMatched fields = WithProps.Exists.byFields tableName howMatched fields (fromDataSource ()) - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The table in which existence should be checked (may include schema) /// The document to match with the containment query /// True if any matching documents exist, false if not @@ -175,7 +226,7 @@ module Exists = let byContains tableName criteria = WithProps.Exists.byContains tableName criteria (fromDataSource ()) - /// Determine if a document exists using a JSON Path match query (@?) + /// Determine if a document exists using a JSON Path match query (@?) 
/// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched /// True if any matching documents exist, false if not @@ -184,7 +235,7 @@ module Exists = WithProps.Exists.byJsonPath tableName jsonPath (fromDataSource ()) -/// Commands to retrieve documents +/// Commands to retrieve documents as domain objects [] module Find = @@ -219,7 +270,7 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise [] let byId<'TKey, 'TDoc> tableName docId = WithProps.Find.byId<'TKey, 'TDoc> tableName docId (fromDataSource ()) @@ -227,11 +278,11 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey) = WithProps.Find.ById<'TKey, 'TDoc>(tableName, docId, fromDataSource ()) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -240,7 +291,7 @@ module Find = let byFields<'TDoc> tableName howMatched fields = WithProps.Find.byFields<'TDoc> tableName howMatched fields (fromDataSource ()) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
/// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -249,8 +300,8 @@ module Find = WithProps.Find.ByFields<'TDoc>(tableName, howMatched, fields, fromDataSource ()) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -262,8 +313,8 @@ module Find = WithProps.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (fromDataSource ()) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -273,7 +324,7 @@ module Find = let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) = WithProps.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, fromDataSource ()) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -281,7 +332,7 @@ module Find = let byContains<'TDoc> tableName (criteria: obj) = WithProps.Find.byContains<'TDoc> tableName criteria (fromDataSource ()) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// 
The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -289,8 +340,7 @@ module Find = WithProps.Find.ByContains<'TDoc>(tableName, criteria, fromDataSource ()) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query @@ -301,8 +351,7 @@ module Find = WithProps.Find.byContainsOrdered<'TDoc> tableName criteria orderFields (fromDataSource ()) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query @@ -311,7 +360,7 @@ module Find = let ByContainsOrdered<'TDoc>(tableName, criteria: obj, orderFields) = WithProps.Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, fromDataSource ()) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -319,7 +368,7 @@ module Find = let byJsonPath<'TDoc> tableName jsonPath = WithProps.Find.byJsonPath<'TDoc> tableName jsonPath (fromDataSource ()) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) 
/// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -327,7 +376,7 @@ module Find = WithProps.Find.ByJsonPath<'TDoc>(tableName, jsonPath, fromDataSource ()) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -338,7 +387,7 @@ module Find = WithProps.Find.byJsonPathOrdered<'TDoc> tableName jsonPath orderFields (fromDataSource ()) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -347,132 +396,442 @@ module Find = let ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields) = WithProps.Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ()) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByFields<'TDoc> tableName howMatched fields = WithProps.Find.firstByFields<'TDoc> tableName howMatched fields (fromDataSource ()) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
+ /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = WithProps.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, fromDataSource ()) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// Some with the first document ordered by the given fields, or None if not found [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = WithProps.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (fromDataSource ()) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields) = WithProps.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, fromDataSource ()) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByContains<'TDoc> tableName (criteria: obj) = WithProps.Find.firstByContains<'TDoc> tableName criteria (fromDataSource ()) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The first document, or null if not found + /// The first document, or null if not found let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj) = WithProps.Find.FirstByContains<'TDoc>(tableName, criteria, fromDataSource ()) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which 
a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// Some with the first document ordered by the given fields, or None if not found [] let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields = WithProps.Find.firstByContainsOrdered<'TDoc> tableName criteria orderFields (fromDataSource ()) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj, orderFields) = WithProps.Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, fromDataSource ()) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByJsonPath<'TDoc> tableName jsonPath = WithProps.Find.firstByJsonPath<'TDoc> tableName jsonPath (fromDataSource ()) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) 
/// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The first document, or null if not found + /// The first document, or null if not found let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath) = WithProps.Find.FirstByJsonPath<'TDoc>(tableName, jsonPath, fromDataSource ()) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// Some with the first document ordered by the given fields, or None if not found [] let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = WithProps.Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (fromDataSource ()) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) 
ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, orderFields) = WithProps.Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ()) +/// Commands to retrieve documents as JSON +[] +module Json = + + /// Retrieve all documents in the given table as a JSON array + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table as a JSON array + [] + let all tableName = + WithProps.Json.all tableName (fromDataSource ()) + + /// Write all documents in the given table to the given PipeWriter + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + [] + let writeAll tableName writer = + WithProps.Json.writeAll tableName writer (fromDataSource ()) + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table as a JSON array, ordered by the given fields + [] + let allOrdered tableName orderFields = + WithProps.Json.allOrdered tableName orderFields (fromDataSource ()) + + /// + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be 
ordered + [] + let writeAllOrdered tableName writer orderFields = + WithProps.Json.writeAllOrdered tableName writer orderFields (fromDataSource ()) + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) = + WithProps.Json.byId tableName docId (fromDataSource ()) + + /// Write a JSON document to the given PipeWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + [] + let writeById<'TKey> tableName writer (docId: 'TKey) = + WithProps.Json.writeById tableName writer docId (fromDataSource ()) + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields = + WithProps.Json.byFields tableName howMatched fields (fromDataSource ()) + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeByFields tableName writer howMatched fields = + WithProps.Json.writeByFields tableName writer howMatched fields (fromDataSource ()) + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields = + WithProps.Json.byFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ()) + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields = + WithProps.Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (fromDataSource ()) + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// All JSON documents matching the given containment query + [] + let byContains tableName (criteria: obj) = + WithProps.Json.byContains tableName criteria (fromDataSource ()) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + [] + let writeByContains tableName writer (criteria: obj) = + WithProps.Json.writeByContains tableName writer criteria (fromDataSource ()) 
+ + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// All documents matching the given containment query, ordered by the given fields + [] + let byContainsOrdered tableName (criteria: obj) orderFields = + WithProps.Json.byContainsOrdered tableName criteria orderFields (fromDataSource ()) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by + /// the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + let writeByContainsOrdered tableName writer (criteria: obj) orderFields = + WithProps.Json.writeByContainsOrdered tableName writer criteria orderFields (fromDataSource ()) + + /// Retrieve JSON documents matching a JSON Path match query (@?) + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// All JSON documents matching the given JSON Path expression + [] + let byJsonPath tableName jsonPath = + WithProps.Json.byJsonPath tableName jsonPath (fromDataSource ()) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + [] + let writeByJsonPath tableName writer jsonPath = + WithProps.Json.writeByJsonPath tableName writer jsonPath (fromDataSource ()) + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) 
ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + [] + let byJsonPathOrdered tableName jsonPath orderFields = + WithProps.Json.byJsonPathOrdered tableName jsonPath orderFields (fromDataSource ()) + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by the + /// given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + let writeByJsonPathOrdered tableName writer jsonPath orderFields = + WithProps.Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (fromDataSource ()) + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields = + WithProps.Json.firstByFields tableName howMatched fields (fromDataSource ()) + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
+ /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeFirstByFields tableName writer howMatched fields = + WithProps.Json.writeFirstByFields tableName writer howMatched fields (fromDataSource ()) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFieldsOrdered tableName howMatched queryFields orderFields = + WithProps.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ()) + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + WithProps.Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields (fromDataSource ()) + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContains tableName (criteria: obj) = + WithProps.Json.firstByContains tableName criteria (fromDataSource ()) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + [] + let writeFirstByContains tableName writer (criteria: obj) = + WithProps.Json.writeFirstByContains tableName writer criteria (fromDataSource ()) + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in + /// the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContainsOrdered tableName (criteria: obj) orderFields = + WithProps.Json.firstByContainsOrdered tableName criteria orderFields (fromDataSource ()) + 
+ /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + let writeFirstByContainsOrdered tableName writer (criteria: obj) orderFields = + WithProps.Json.writeFirstByContainsOrdered tableName writer criteria orderFields (fromDataSource ()) + + /// Retrieve the first JSON document matching a JSON Path match query (@?) + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPath tableName jsonPath = + WithProps.Json.firstByJsonPath tableName jsonPath (fromDataSource ()) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + [] + let writeFirstByJsonPath tableName writer jsonPath = + WithProps.Json.writeFirstByJsonPath tableName writer jsonPath (fromDataSource ()) + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) 
ordered by the given fields in the + /// document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPathOrdered tableName jsonPath orderFields = + WithProps.Json.firstByJsonPathOrdered tableName jsonPath orderFields (fromDataSource ()) + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + let writeFirstByJsonPathOrdered tableName writer jsonPath orderFields = + WithProps.Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (fromDataSource ()) + + /// Commands to update documents [] module Update = @@ -518,7 +877,7 @@ module Patch = WithProps.Patch.byId tableName docId patch (fromDataSource ()) /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
/// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions @@ -528,7 +887,7 @@ module Patch = let byFields tableName howMatched fields (patch: 'TPatch) = WithProps.Patch.byFields tableName howMatched fields patch (fromDataSource ()) - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// The table in which documents should be patched (may include schema) /// The document to match the containment query /// The partial document to patch the existing document @@ -536,7 +895,7 @@ module Patch = let byContains tableName (criteria: 'TCriteria) (patch: 'TPatch) = WithProps.Patch.byContains tableName criteria patch (fromDataSource ()) - /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document @@ -566,7 +925,7 @@ module RemoveFields = let byFields tableName howMatched fields fieldNames = WithProps.RemoveFields.byFields tableName howMatched fields fieldNames (fromDataSource ()) - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents @@ -574,7 +933,7 @@ module RemoveFields = let byContains tableName (criteria: 'TContains) fieldNames = WithProps.RemoveFields.byContains tableName criteria fieldNames (fromDataSource ()) - /// Remove fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) 
/// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents @@ -594,7 +953,7 @@ module Delete = let byId tableName (docId: 'TKey) = WithProps.Delete.byId tableName docId (fromDataSource ()) - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -602,7 +961,7 @@ module Delete = let byFields tableName howMatched fields = WithProps.Delete.byFields tableName howMatched fields (fromDataSource ()) - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The table in which documents should be deleted (may include schema) /// The document to match the containment query [] diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index edf03af..230c7f2 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -4,11 +4,11 @@ [] type DocumentIndex = - /// A GIN index with standard operations (all operators supported) + /// A GIN index with standard operations (all operators supported) | Full /// - /// A GIN index with JSONPath operations (optimized for @>, @?, @@ operators) + /// A GIN index with JSON Path operations (optimized for @>, @?, @@ operators) /// | Optimized @@ -94,7 +94,7 @@ module Parameters = name, Sql.jsonb (Configuration.serializer().Serialize it) /// Create JSON field parameters - /// The Fields to convert to parameters + /// The Fields to convert to parameters /// The current parameters for the query /// A unified sequence of parameter names and values [] @@ -129,7 +129,7 @@ module Parameters = /// Append JSON field name parameters for the given field names to the given parameters /// The names 
of fields to be addressed - /// The name (@name) and parameter value for the field names + /// The name (@name) and parameter value for the field names [] let fieldNameParams (fieldNames: string seq) = if Seq.length fieldNames = 1 then "@name", Sql.string (Seq.head fieldNames) @@ -145,12 +145,10 @@ module Parameters = [] module Query = - /// - /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document - /// + /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document /// How the fields should be matched /// The fields for the comparisons - /// A WHERE clause implementing the comparisons for the given fields + /// A WHERE clause implementing the comparisons for the given fields [] let whereByFields (howMatched: FieldMatch) fields = let name = ParameterName() @@ -179,9 +177,9 @@ module Query = else $"{it.Path PostgreSQL AsSql} {it.Comparison.OpSql} {param}") |> String.concat $" {howMatched} " - /// Create a WHERE clause fragment to implement an ID-based query + /// Create a WHERE clause fragment to implement an ID-based query /// The ID of the document - /// A WHERE clause fragment identifying a document by its ID + /// A WHERE clause fragment identifying a document by its ID [] let whereById<'TKey> (docId: 'TKey) = whereByFields Any [ { Field.Equal (Configuration.idField ()) docId with ParameterName = Some "@id" } ] @@ -206,32 +204,28 @@ module Query = let tableName = name.Split '.' 
|> Array.last $"CREATE INDEX IF NOT EXISTS idx_{tableName}_document ON {name} USING GIN (data{extraOps})" - /// - /// Create a WHERE clause fragment to implement a @> (JSON contains) condition - /// + /// Create a WHERE clause fragment to implement a @> (JSON contains) condition /// The parameter name for the query - /// A WHERE clause fragment for the contains condition + /// A WHERE clause fragment for the contains condition [] let whereDataContains paramName = $"data @> %s{paramName}" - /// - /// Create a WHERE clause fragment to implement a @? (JSON Path match) condition - /// + /// Create a WHERE clause fragment to implement a @? (JSON Path match) condition /// The parameter name for the query - /// A WHERE clause fragment for the JSON Path match condition + /// A WHERE clause fragment for the JSON Path match condition [] let whereJsonPathMatches paramName = $"data @? %s{paramName}::jsonpath" - /// Create an UPDATE statement to patch documents + /// Create an UPDATE statement to patch documents /// The table to be updated /// A query to patch documents [] let patch tableName = $"UPDATE %s{tableName} SET data = data || @data" - /// Create an UPDATE statement to remove fields from documents + /// Create an UPDATE statement to remove fields from documents /// The table to be updated /// A query to remove fields from documents [] @@ -270,6 +264,8 @@ module Query = Query.statementWhere statement (whereJsonPathMatches "@path") +open System.Text + /// Functions for dealing with results [] module Results = @@ -289,16 +285,67 @@ module Results = let fromData<'T> row : 'T = fromDocument "data" row - /// Extract a count from the column it + /// Extract a count from the column it /// A row reader set to the row with the count to retrieve /// The count from the row [] let toCount (row: RowReader) = row.int "it" - /// Extract a true/false value from the column it + /// Extract a true/false value from the column it /// A row reader set to the row with the true/false value to 
retrieve /// The true/false value from the row [] let toExists (row: RowReader) = row.bool "it" + + /// Extract a JSON document, specifying the field in which the document is found + /// The field name containing the JSON document + /// A row reader set to the row with the document to be extracted + /// The JSON from the given field (an empty object if no field exists) + [] + let jsonFromDocument field (row: RowReader) = + row.stringOrNone field |> Option.defaultValue "{}" + + /// Extract a JSON document + /// A row reader set to the row with the document to be extracted + /// The JSON from the row (an empty object if no field exists) + [] + let jsonFromData row = + jsonFromDocument "data" row + + /// Create a JSON array of items for the results of a query + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + /// A JSON array as a string; no results will produce an empty array ("[]") + [] + let toJsonArray (mapFunc: RowReader -> string) sqlProps = backgroundTask { + let output = StringBuilder("[") + do! 
sqlProps + |> Sql.iterAsync (fun it -> + if output.Length > 2 then ignore (output.Append ",") + mapFunc it |> output.Append |> ignore) + return output.Append("]").ToString() + } + + /// Create a JSON array of items for the results of a query + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + /// A JSON array as a string; no results will produce an empty array ("[]") + let ToJsonArray(mapFunc: System.Func, sqlProps) = + toJsonArray mapFunc.Invoke sqlProps + + /// Write a JSON array of items for the results of a query to the given PipeWriter + /// The PipeWriter to which results should be written + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + [] + let writeJsonArray writer (mapFunc: RowReader -> string) sqlProps = + sqlProps |> Sql.toSeq mapFunc |> PipeWriter.writeStrings writer + + /// Write a JSON array of items for the results of a query to the given PipeWriter + /// The PipeWriter to which results should be written + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + let WriteJsonArray(writer, mapFunc: System.Func, sqlProps) = + writeJsonArray writer mapFunc.Invoke sqlProps diff --git a/src/Postgres/README.md b/src/Postgres/README.md index ff442c9..d0b6cf2 100644 --- a/src/Postgres/README.md +++ b/src/Postgres/README.md @@ -13,7 +13,7 @@ This package provides a lightweight document library backed by [PostgreSQL](http ## Upgrading from v3 -There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://bitbadger.solutions/open-source/relational-documents/upgrade-from-v3-to-v4.html) for full details. 
+There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://relationaldocs.bitbadger.solutions/dotnet/upgrade/v4.html) for full details. ## Getting Started @@ -71,7 +71,7 @@ var customer = await Find.ById("customer", "123"); // Find.byId type signature is string -> 'TKey -> Task<'TDoc option> let! customer = Find.byId "customer" "123" ``` -_(keys are treated as strings or numbers depending on their defintion; however, they are indexed as strings)_ +_(keys are treated as strings or numbers depending on their definition; however, they are indexed as strings)_ Count customers in Atlanta (using JSON containment): @@ -103,4 +103,4 @@ do! Delete.byJsonPath "customer" """$.City ? (@ == "Chicago")""" ## More Information -The [project site](https://bitbadger.solutions/open-source/relational-documents/) has full details on how to use this library. +The [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has full details on how to use this library. 
diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs index da7bc86..16901ff 100644 --- a/src/Postgres/WithProps.fs +++ b/src/Postgres/WithProps.fs @@ -1,4 +1,4 @@ -/// Versions of queries that accept SqlProps as the last parameter +/// Versions of queries that accept SqlProps as the last parameter module BitBadger.Documents.Postgres.WithProps open BitBadger.Documents @@ -14,7 +14,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// A list of results for the given query [] let list<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) sqlProps = @@ -26,22 +26,64 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// A list of results for the given query let List<'TDoc>(query, parameters, mapFunc: System.Func, sqlProps) = backgroundTask { let! 
results = list<'TDoc> query parameters mapFunc.Invoke sqlProps return ResizeArray results } + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqlProps to use to execute the query + /// A JSON array of results for the given query + [] + let jsonArray query parameters (mapFunc: RowReader -> string) sqlProps = + Sql.query query sqlProps + |> Sql.parameters (FSharpList.ofSeq parameters) + |> toJsonArray mapFunc + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqlProps to use to execute the query + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc: System.Func, sqlProps) = + jsonArray query parameters mapFunc.Invoke sqlProps + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + /// The SqlProps to use to execute the query + [] + let writeJsonArray query parameters writer (mapFunc: RowReader -> string) sqlProps = + Sql.query query sqlProps + |> Sql.parameters (FSharpList.ofSeq parameters) + |> writeJsonArray writer mapFunc + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + /// The SqlProps to use to execute the query + let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, sqlProps) = + writeJsonArray query parameters writer mapFunc.Invoke sqlProps + /// Execute a query that returns one or no results /// The query to 
retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query - /// Some with the first matching result, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first matching result, or None if not found [] let single<'TDoc> query parameters mapFunc sqlProps = backgroundTask { - let! results = list<'TDoc> query parameters mapFunc sqlProps + let! results = list<'TDoc> $"{query} LIMIT 1" parameters mapFunc sqlProps return FSharpList.tryHead results } @@ -49,18 +91,39 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query - /// The first matching result, or null if not found + /// The SqlProps to use to execute the query + /// The first matching result, or null if not found let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( query, parameters, mapFunc: System.Func, sqlProps) = backgroundTask { let! result = single<'TDoc> query parameters mapFunc.Invoke sqlProps return Option.toObj result } + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqlProps to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc sqlProps = backgroundTask { + let! 
results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc sqlProps + return if results = "[]" then "{}" else results[1..results.Length - 2] + } + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqlProps to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc: System.Func, sqlProps) = + jsonSingle query parameters mapFunc.Invoke sqlProps + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let nonQuery query parameters sqlProps = Sql.query query sqlProps @@ -72,7 +135,7 @@ module Custom = /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The scalar value for the query [] let scalar<'T when 'T: struct> query parameters (mapFunc: RowReader -> 'T) sqlProps = @@ -84,7 +147,7 @@ module Custom = /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The scalar value for the query let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, sqlProps) = scalar<'T> query parameters mapFunc.Invoke sqlProps @@ -94,7 +157,7 @@ module Definition = /// Create a document table /// The table whose existence should be ensured (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let ensureTable name sqlProps = backgroundTask { do! 
Custom.nonQuery (Query.Definition.ensureTable name) [] sqlProps @@ -104,7 +167,7 @@ module Definition = /// Create an index on documents in the specified table /// The table to be indexed (may include schema) /// The type of document index to create - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let ensureDocumentIndex name idxType sqlProps = Custom.nonQuery (Query.Definition.ensureDocumentIndex name idxType) [] sqlProps @@ -113,7 +176,7 @@ module Definition = /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let ensureFieldIndex tableName indexName fields sqlProps = Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields PostgreSQL) [] sqlProps @@ -125,7 +188,7 @@ module Document = /// Insert a new document /// The table into which the document should be inserted (may include schema) /// The document to be inserted - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let insert<'TDoc> tableName (document: 'TDoc) sqlProps = let query = @@ -149,7 +212,7 @@ module Document = /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") /// The table into which the document should be saved (may include schema) /// The document to be saved - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let save<'TDoc> tableName (document: 'TDoc) sqlProps = Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] sqlProps @@ -160,37 +223,37 @@ module Count = /// Count all documents in a table /// The table in which documents should be counted (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of the documents in the table [] let all 
tableName sqlProps = Custom.scalar (Query.count tableName) [] toCount sqlProps - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of matching documents in the table [] let byFields tableName howMatched fields sqlProps = Custom.scalar (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount sqlProps - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The table in which documents should be counted (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of the documents in the table [] let byContains tableName (criteria: 'TContains) sqlProps = Custom.scalar (Query.byContains (Query.count tableName)) [ jsonParam "@criteria" criteria ] toCount sqlProps - /// Count matching documents using a JSON Path match query (@?) + /// Count matching documents using a JSON Path match query (@?) 
/// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of the documents in the table [] let byJsonPath tableName jsonPath sqlProps = @@ -204,17 +267,17 @@ module Exists = /// Determine if a document exists for the given ID /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if a document exists, false if not [] let byId tableName (docId: 'TKey) sqlProps = Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists sqlProps - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if any matching documents exist, false if not [] let byFields tableName howMatched fields sqlProps = @@ -224,10 +287,10 @@ module Exists = toExists sqlProps - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The table in which existence should be checked (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if any matching documents exist, false if not [] let byContains tableName (criteria: 'TContains) sqlProps = @@ -237,10 +300,10 @@ module Exists = toExists sqlProps - /// Determine if a document exists using a JSON Path match query 
(@?) + /// Determine if a document exists using a JSON Path match query (@?) /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if any matching documents exist, false if not [] let byJsonPath tableName jsonPath sqlProps = @@ -250,13 +313,13 @@ module Exists = toExists sqlProps -/// Commands to retrieve documents +/// Commands to retrieve documents as domain objects [] module Find = /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table [] let all<'TDoc> tableName sqlProps = @@ -264,7 +327,7 @@ module Find = /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table let All<'TDoc>(tableName, sqlProps) = Custom.List<'TDoc>(Query.find tableName, [], fromData<'TDoc>, sqlProps) @@ -272,7 +335,7 @@ module Find = /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table, ordered by the given fields [] let allOrdered<'TDoc> tableName orderFields sqlProps = @@ -281,7 +344,7 @@ module Find = /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered - /// The SqlProps to use to 
execute the query + /// The SqlProps to use to execute the query /// All documents from the given table, ordered by the given fields let AllOrdered<'TDoc>(tableName, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -290,8 +353,8 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The SqlProps to use to execute the query - /// Some with the document if found, None otherwise + /// The SqlProps to use to execute the query + /// Some with the document if found, None otherwise [] let byId<'TKey, 'TDoc> tableName (docId: 'TKey) sqlProps = Custom.single (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> sqlProps @@ -299,17 +362,17 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The SqlProps to use to execute the query - /// The document if found, null otherwise + /// The SqlProps to use to execute the query + /// The document if found, null otherwise let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, sqlProps) = Custom.Single<'TDoc>( Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, sqlProps) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields [] let byFields<'TDoc> tableName howMatched fields sqlProps = @@ -319,11 +382,11 @@ module Find = fromData<'TDoc> sqlProps - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields let ByFields<'TDoc>(tableName, howMatched, fields, sqlProps) = Custom.List<'TDoc>( @@ -333,14 +396,14 @@ module Find = sqlProps) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in /// the document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields, ordered by the other given fields [] let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps = @@ -351,14 +414,14 @@ module Find = sqlProps /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields, ordered by the other given fields let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -367,20 +430,20 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query [] let byContains<'TDoc> tableName (criteria: obj) sqlProps = Custom.list<'TDoc> (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query let ByContains<'TDoc>(tableName, criteria: obj, sqlProps) = Custom.List<'TDoc>( @@ -390,13 +453,12 @@ module Find = sqlProps) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents 
should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query, ordered by the given fields [] let byContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps = @@ -407,13 +469,12 @@ module Find = sqlProps /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query, ordered by the given fields let ByContainsOrdered<'TDoc>(tableName, criteria: obj, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -422,20 +483,20 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression [] let byJsonPath<'TDoc> tableName jsonPath sqlProps = Custom.list<'TDoc> (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] fromData<'TDoc> sqlProps - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) 
/// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression let ByJsonPath<'TDoc>(tableName, jsonPath, sqlProps) = Custom.List<'TDoc>( @@ -445,12 +506,12 @@ module Find = sqlProps) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression, ordered by the given fields [] let byJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps = @@ -461,12 +522,12 @@ module Find = sqlProps /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression, ordered by the given fields let ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -475,192 +536,599 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
/// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query - /// Some with the first document, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first document, or None if not found [] let firstByFields<'TDoc> tableName howMatched fields sqlProps = Custom.single<'TDoc> - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1" + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) fromData<'TDoc> sqlProps - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query - /// The first document, or null if not found + /// The SqlProps to use to execute the query + /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", + Query.byFields (Query.find tableName) howMatched fields, addFieldParams fields [], fromData<'TDoc>, sqlProps) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// The SqlProps to use to execute the query + /// Some with the first document ordered by the given fields, or None if not found [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps = Custom.single<'TDoc> - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL} LIMIT 1" + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) (addFieldParams queryFields []) fromData<'TDoc> sqlProps /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// The first document ordered by the given fields, or null if not found + /// The SqlProps to use to execute the query + /// The first document ordered by the given fields, or null if not found let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL} LIMIT 1", + Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL, addFieldParams queryFields [], fromData<'TDoc>, sqlProps) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query - /// Some with the first document, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first document, or None if not found [] let firstByContains<'TDoc> tableName (criteria: obj) sqlProps = Custom.single<'TDoc> - $"{Query.byContains (Query.find tableName)} LIMIT 1" - [ jsonParam "@criteria" criteria ] - fromData<'TDoc> - sqlProps + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the 
containment query - /// The SqlProps to use to execute the query - /// The first document, or null if not found + /// The SqlProps to use to execute the query + /// The first document, or null if not found let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byContains (Query.find tableName)} LIMIT 1", - [ jsonParam "@criteria" criteria ], - fromData<'TDoc>, - sqlProps) + Query.byContains (Query.find tableName), [ jsonParam "@criteria" criteria ], fromData<'TDoc>, sqlProps) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// The SqlProps to use to execute the query + /// Some with the first document ordered by the given fields, or None if not found [] let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps = Custom.single<'TDoc> - $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1" + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields 
by which the results should be ordered + /// The SqlProps to use to execute the query + /// The first document ordered by the given fields, or null if not found + let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( + tableName, criteria: obj, orderFields, sqlProps) = + Custom.Single<'TDoc>( + Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL, + [ jsonParam "@criteria" criteria ], + fromData<'TDoc>, + sqlProps) + + /// Retrieve the first document matching a JSON Path match query (@?) + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + /// Some with the first document, or None if not found + [] + let firstByJsonPath<'TDoc> tableName jsonPath sqlProps = + Custom.single<'TDoc> + (Query.byPathMatch (Query.find tableName)) + [ "@path", Sql.string jsonPath ] + fromData<'TDoc> + sqlProps + + /// Retrieve the first document matching a JSON Path match query (@?) + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + /// The first document, or null if not found + let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, sqlProps) = + Custom.Single<'TDoc>( + Query.byPathMatch (Query.find tableName), + [ "@path", Sql.string jsonPath ], + fromData<'TDoc>, + sqlProps) + + /// + /// Retrieve the first document matching a JSON Path match query (@?) 
ordered by the given fields in the + /// document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// Some with the first document ordered by the given fields, or None if not found + [] + let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps = + Custom.single<'TDoc> + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ "@path", Sql.string jsonPath ] + fromData<'TDoc> + sqlProps + + /// + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// The first document ordered by the given fields, or null if not found + let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( + tableName, jsonPath, orderFields, sqlProps) = + Custom.Single<'TDoc>( + Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL, + [ "@path", Sql.string jsonPath ], + fromData<'TDoc>, + sqlProps) + +/// Commands to retrieve documents as JSON +[] +module Json = + + /// Retrieve all documents in the given table as a JSON array + /// The table from which documents should be retrieved (may include schema) + /// The SqlProps to use to execute the query + /// All documents from the given table as a JSON array + [] + let all tableName sqlProps = + Custom.jsonArray (Query.find tableName) [] jsonFromData sqlProps + + /// Write all documents in the given table to the given PipeWriter + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The SqlProps to use to 
execute the query + [] + let writeAll tableName writer sqlProps = + Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData sqlProps + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All documents from the given table as a JSON array, ordered by the given fields + [] + let allOrdered tableName orderFields sqlProps = + Custom.jsonArray (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] jsonFromData sqlProps + + /// + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeAllOrdered tableName writer orderFields sqlProps = + Custom.writeJsonArray + (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] writer jsonFromData sqlProps + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqlProps to use to execute the query + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) sqlProps = + Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData sqlProps + + /// Write a JSON document to the given PipeWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + /// The SqlProps to use to execute the query + [] + let 
writeById<'TKey> tableName writer (docId: 'TKey) sqlProps = backgroundTask { + let! json = byId tableName docId sqlProps + let! _ = PipeWriter.writeString writer json + () + } + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields sqlProps = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + [] + let writeByFields tableName writer howMatched fields sqlProps = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched fields) + (addFieldParams fields []) + writer + jsonFromData + sqlProps + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields sqlProps = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) + (addFieldParams queryFields []) + jsonFromData + sqlProps + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields sqlProps = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) + (addFieldParams queryFields []) + writer + jsonFromData + sqlProps + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// The SqlProps to use to execute the query + /// All JSON documents matching the given containment query + [] + let byContains tableName (criteria: obj) sqlProps = + Custom.jsonArray + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps + + /// + /// Write JSON documents to the 
given PipeWriter matching a JSON containment query (@>) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// The SqlProps to use to execute the query + [] + let writeByContains tableName writer (criteria: obj) sqlProps = + Custom.writeJsonArray + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] writer jsonFromData sqlProps + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All documents matching the given containment query, ordered by the given fields + [] + let byContainsOrdered tableName (criteria: obj) orderFields sqlProps = + Custom.jsonArray + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ jsonParam "@criteria" criteria ] + jsonFromData + sqlProps + + /// + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by + /// the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeByContainsOrdered tableName writer (criteria: obj) orderFields sqlProps = + Custom.writeJsonArray + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ jsonParam "@criteria" criteria ] + writer + jsonFromData + sqlProps + + /// Retrieve JSON documents matching a JSON Path match query (@?) 
+ /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + /// All JSON documents matching the given JSON Path expression + [] + let byJsonPath tableName jsonPath sqlProps = + Custom.jsonArray + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + [] + let writeByJsonPath tableName writer jsonPath sqlProps = + Custom.writeJsonArray + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] writer jsonFromData sqlProps + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + [] + let byJsonPathOrdered tableName jsonPath orderFields sqlProps = + Custom.jsonArray + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ "@path", Sql.string jsonPath ] + jsonFromData + sqlProps + + /// + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) 
ordered by the + /// given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeByJsonPathOrdered tableName writer jsonPath orderFields sqlProps = + Custom.writeJsonArray + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ "@path", Sql.string jsonPath ] + writer + jsonFromData + sqlProps + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields sqlProps = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons(->> =, + /// etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + [] + let writeFirstByFields tableName writer howMatched fields sqlProps = backgroundTask { + let! json = firstByFields tableName howMatched fields sqlProps + let! _ = PipeWriter.writeString writer json + () + } + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) + (addFieldParams queryFields []) + jsonFromData + sqlProps + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields sqlProps = backgroundTask { + let! json = firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps + let! 
_ = PipeWriter.writeString writer json + () + } + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContains tableName (criteria: obj) sqlProps = + Custom.jsonSingle + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps + + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// The SqlProps to use to execute the query + [] + let writeFirstByContains tableName writer (criteria: obj) sqlProps = backgroundTask { + let! json = firstByContains tableName criteria sqlProps + let! _ = PipeWriter.writeString writer json + () + } + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in /// the document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// The first document ordered by the given fields, or null if not found - let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( - tableName, criteria: obj, orderFields, sqlProps) = - Custom.Single<'TDoc>( - $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1", - [ jsonParam "@criteria" criteria ], - fromData<'TDoc>, - sqlProps) - - /// Retrieve the first document matching a JSON Path match query (@?) 
- /// The table from which a document should be retrieved (may include schema) - /// The JSON Path expression to match - /// The SqlProps to use to execute the query - /// Some with the first document, or None if not found - [] - let firstByJsonPath<'TDoc> tableName jsonPath sqlProps = - Custom.single<'TDoc> - $"{Query.byPathMatch (Query.find tableName)} LIMIT 1" - [ "@path", Sql.string jsonPath ] - fromData<'TDoc> + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContainsOrdered tableName (criteria: obj) orderFields sqlProps = + Custom.jsonSingle + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ jsonParam "@criteria" criteria ] + jsonFromData sqlProps - /// Retrieve the first document matching a JSON Path match query (@?) + /// + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeFirstByContainsOrdered tableName writer (criteria: obj) orderFields sqlProps = backgroundTask { + let! json = firstByContainsOrdered tableName criteria orderFields sqlProps + let! _ = PipeWriter.writeString writer json + () + } + + /// Retrieve the first JSON document matching a JSON Path match query (@?) 
/// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query - /// The first document, or null if not found - let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, sqlProps) = - Custom.Single<'TDoc>( - $"{Query.byPathMatch (Query.find tableName)} LIMIT 1", - [ "@path", Sql.string jsonPath ], - fromData<'TDoc>, - sqlProps) + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPath tableName jsonPath sqlProps = + Custom.jsonSingle + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + [] + let writeFirstByJsonPath tableName writer jsonPath sqlProps = backgroundTask { + let! json = firstByJsonPath tableName jsonPath sqlProps + let! _ = PipeWriter.writeString writer json + () + } + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) 
ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// - [] - let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps = - Custom.single<'TDoc> - $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1" + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPathOrdered tableName jsonPath orderFields sqlProps = + Custom.jsonSingle + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) [ "@path", Sql.string jsonPath ] - fromData<'TDoc> + jsonFromData sqlProps /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the - /// document + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) 
+ /// ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// The first document ordered by the given fields, or null if not found - let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( - tableName, jsonPath, orderFields, sqlProps) = - Custom.Single<'TDoc>( - $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1", - [ "@path", Sql.string jsonPath ], - fromData<'TDoc>, - sqlProps) + /// The SqlProps to use to execute the query + [] + let writeFirstByJsonPathOrdered tableName writer jsonPath orderFields sqlProps = backgroundTask { + let! json = firstByJsonPathOrdered tableName jsonPath orderFields sqlProps + let! _ = PipeWriter.writeString writer json + () + } /// Commands to update documents [] @@ -670,7 +1138,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) (document: 'TDoc) sqlProps = Custom.nonQuery @@ -682,7 +1150,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) sqlProps = byId tableName (idFunc document) document sqlProps @@ -693,7 +1161,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document - /// The SqlProps to 
use to execute the query + /// The SqlProps to use to execute the query let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, sqlProps) = byFunc tableName idFunc.Invoke document sqlProps @@ -705,20 +1173,20 @@ module Patch = /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) (patch: 'TPatch) sqlProps = Custom.nonQuery (Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] sqlProps /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) /// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFields tableName howMatched fields (patch: 'TPatch) sqlProps = Custom.nonQuery @@ -726,11 +1194,11 @@ module Patch = (addFieldParams fields [ jsonParam "@data" patch ]) sqlProps - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// The table in which documents should be patched (may include schema) /// The document to match the containment query /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byContains tableName (criteria: 'TContains) (patch: 'TPatch) sqlProps = Custom.nonQuery @@ -738,11 +1206,11 @@ module Patch = [ jsonParam "@data" patch; jsonParam "@criteria" criteria ] sqlProps 
- /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byJsonPath tableName jsonPath (patch: 'TPatch) sqlProps = Custom.nonQuery @@ -758,7 +1226,7 @@ module RemoveFields = /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) fieldNames sqlProps = Custom.nonQuery @@ -769,7 +1237,7 @@ module RemoveFields = /// Whether to match any or all of the field conditions /// The field conditions to match /// One or more field names to remove from the matching documents - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFields tableName howMatched fields fieldNames sqlProps = Custom.nonQuery @@ -777,11 +1245,11 @@ module RemoveFields = (addFieldParams fields [ fieldNameParams fieldNames ]) sqlProps - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byContains tableName (criteria: 'TContains) fieldNames sqlProps = Custom.nonQuery @@ -789,11 +1257,11 @@ module RemoveFields = [ jsonParam "@criteria" criteria; fieldNameParams fieldNames ] sqlProps - /// Remove 
fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) /// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byJsonPath tableName jsonPath fieldNames sqlProps = Custom.nonQuery @@ -808,33 +1276,33 @@ module Delete = /// Delete a document by its ID /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) sqlProps = Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] sqlProps - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
/// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFields tableName howMatched fields sqlProps = Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) sqlProps - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The table in which documents should be deleted (may include schema) /// The document to match the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byContains tableName (criteria: 'TCriteria) sqlProps = Custom.nonQuery (Query.byContains (Query.delete tableName)) [ jsonParam "@criteria" criteria ] sqlProps - /// Delete documents by matching a JSON Path match query (@?) + /// Delete documents by matching a JSON Path match query (@?) 
/// The table in which documents should be deleted (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byJsonPath tableName jsonPath sqlProps = Custom.nonQuery (Query.byPathMatch (Query.delete tableName)) [ "@path", Sql.string jsonPath ] sqlProps diff --git a/src/Sqlite/BitBadger.Documents.Sqlite.fsproj b/src/Sqlite/BitBadger.Documents.Sqlite.fsproj index e19b49d..c830a84 100644 --- a/src/Sqlite/BitBadger.Documents.Sqlite.fsproj +++ b/src/Sqlite/BitBadger.Documents.Sqlite.fsproj @@ -8,6 +8,8 @@ + + diff --git a/src/Sqlite/Extensions.fs b/src/Sqlite/Extensions.fs index 901bc0a..c6d192d 100644 --- a/src/Sqlite/Extensions.fs +++ b/src/Sqlite/Extensions.fs @@ -1,6 +1,7 @@ namespace BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open WithConn /// F# extensions for the SqliteConnection type [] @@ -14,21 +15,45 @@ module Extensions = /// The mapping function between the document and the domain item /// A list of results for the given query member conn.customList<'TDoc> query parameters mapFunc = - WithConn.Custom.list<'TDoc> query parameters mapFunc conn + Custom.list<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + member conn.customJsonArray query parameters mapFunc = + Custom.jsonArray query parameters mapFunc conn + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + member conn.writeCustomJsonArray query parameters writer mapFunc = + Custom.writeJsonArray query parameters writer mapFunc conn /// Execute a query that returns 
one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found member conn.customSingle<'TDoc> query parameters mapFunc = - WithConn.Custom.single<'TDoc> query parameters mapFunc conn + Custom.single<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + member conn.customJsonSingle query parameters mapFunc = + Custom.jsonSingle query parameters mapFunc conn /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query member conn.customNonQuery query parameters = - WithConn.Custom.nonQuery query parameters conn + Custom.nonQuery query parameters conn /// Execute a query that returns a scalar value /// The query to retrieve the value @@ -36,25 +61,25 @@ module Extensions = /// The mapping function to obtain the value /// The scalar value for the query member conn.customScalar<'T when 'T: struct> query parameters mapFunc = - WithConn.Custom.scalar<'T> query parameters mapFunc conn + Custom.scalar<'T> query parameters mapFunc conn /// Create a document table /// The table whose existence should be ensured (may include schema) member conn.ensureTable name = - WithConn.Definition.ensureTable name conn + Definition.ensureTable name conn /// Create an index on field(s) within documents in the specified table /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed member conn.ensureFieldIndex tableName indexName fields = - WithConn.Definition.ensureFieldIndex tableName 
indexName fields conn + Definition.ensureFieldIndex tableName indexName fields conn /// Insert a new document /// The table into which the document should be inserted (may include schema) /// The document to be inserted member conn.insert<'TDoc> tableName (document: 'TDoc) = - WithConn.Document.insert<'TDoc> tableName document conn + insert<'TDoc> tableName document conn /// /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") @@ -62,68 +87,68 @@ module Extensions = /// The table into which the document should be saved (may include schema) /// The document to be saved member conn.save<'TDoc> tableName (document: 'TDoc) = - WithConn.Document.save tableName document conn + save tableName document conn /// Count all documents in a table /// The table in which documents should be counted (may include schema) /// The count of the documents in the table member conn.countAll tableName = - WithConn.Count.all tableName conn + Count.all tableName conn - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The count of matching documents in the table member conn.countByFields tableName howMatched fields = - WithConn.Count.byFields tableName howMatched fields conn + Count.byFields tableName howMatched fields conn /// Determine if a document exists for the given ID /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked /// True if a document exists, false if not member conn.existsById tableName (docId: 'TKey) = - WithConn.Exists.byId tableName docId conn + Exists.byId tableName docId conn - /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
+ /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// True if any matching documents exist, false if not member conn.existsByFields tableName howMatched fields = - WithConn.Exists.byFields tableName howMatched fields conn + Exists.byFields tableName howMatched fields conn /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) /// All documents from the given table member conn.findAll<'TDoc> tableName = - WithConn.Find.all<'TDoc> tableName conn + Find.all<'TDoc> tableName conn /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered /// All documents from the given table, ordered by the given fields member conn.findAllOrdered<'TDoc> tableName orderFields = - WithConn.Find.allOrdered<'TDoc> tableName orderFields conn + Find.allOrdered<'TDoc> tableName orderFields conn /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise member conn.findById<'TKey, 'TDoc> tableName (docId: 'TKey) = - WithConn.Find.byId<'TKey, 'TDoc> tableName docId conn + Find.byId<'TKey, 'TDoc> tableName docId conn - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
/// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// All documents matching the given fields member conn.findByFields<'TDoc> tableName howMatched fields = - WithConn.Find.byFields<'TDoc> tableName howMatched fields conn + Find.byFields<'TDoc> tableName howMatched fields conn /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// the document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -131,18 +156,18 @@ module Extensions = /// Fields by which the results should be ordered /// All documents matching the given fields, ordered by the other given fields member conn.findByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByFields<'TDoc> tableName howMatched fields = - WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn + Find.firstByFields<'TDoc> tableName howMatched fields conn /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the /// given fields in the document /// /// The table from which a document should be retrieved (may include schema) @@ -150,17 +175,148 @@ module Extensions = /// The field conditions to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + + /// Retrieve all JSON documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All JSON documents from the given table + member conn.jsonAll tableName = + Json.all tableName conn + + /// Retrieve all JSON documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All JSON documents from the given table, ordered by the given fields + member conn.jsonAllOrdered tableName orderFields = + Json.allOrdered tableName orderFields conn + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + member conn.jsonById<'TKey> tableName (docId: 'TKey) = + Json.byId tableName docId conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
+ /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + member conn.jsonByFields tableName howMatched fields = + Json.byFields tableName howMatched fields conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + member conn.jsonByFieldsOrdered tableName howMatched queryFields orderFields = + Json.byFieldsOrdered tableName howMatched queryFields orderFields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByFields tableName howMatched fields = + Json.firstByFields tableName howMatched fields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the + /// given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first JSON document (in order) if found, an empty JSON document otherwise + member conn.jsonFirstByFieldsOrdered tableName howMatched queryFields orderFields = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Write all JSON documents in the given table to the given PipeWriter + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + member conn.writeJsonAll tableName writer = + Json.writeAll tableName writer conn + + /// + /// Write all JSON all documents in the given table to the given PipeWriter, ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + member conn.writeJsonAllOrdered tableName writer orderFields = + Json.writeAllOrdered tableName writer orderFields conn + + /// Write a JSON document to the given PipeWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) = + Json.writeById tableName writer docId conn + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, + /// etc.) 
+ /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonByFields tableName writer howMatched fields = + Json.writeByFields tableName writer howMatched fields conn + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, + /// etc.) ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonFirstByFields tableName writer howMatched fields = + Json.writeFirstByFields tableName writer howMatched fields conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn /// Update (replace) an entire document by its ID /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document member conn.updateById tableName (docId: 'TKey) (document: 'TDoc) = - WithConn.Update.byId tableName docId document conn + Update.byId tableName docId document conn /// /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the @@ -170,32 +326,31 @@ module Extensions = /// The function to obtain the ID of the document /// The new document member conn.updateByFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) = - WithConn.Update.byFunc tableName idFunc document conn + Update.byFunc tableName idFunc document conn /// Patch a document by its ID /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document member conn.patchById tableName (docId: 'TKey) (patch: 'TPatch) = - WithConn.Patch.byId tableName docId patch conn + Patch.byId tableName docId patch conn /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, - /// etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
/// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document member conn.patchByFields tableName howMatched fields (patch: 'TPatch) = - WithConn.Patch.byFields tableName howMatched fields patch conn + Patch.byFields tableName howMatched fields patch conn /// Remove fields from a document by the document's ID /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document member conn.removeFieldsById tableName (docId: 'TKey) fieldNames = - WithConn.RemoveFields.byId tableName docId fieldNames conn + RemoveFields.byId tableName docId fieldNames conn /// Remove fields from documents via a comparison on JSON fields in the document /// The table in which documents should be modified (may include schema) @@ -203,20 +358,20 @@ module Extensions = /// The field conditions to match /// One or more field names to remove from the matching documents member conn.removeFieldsByFields tableName howMatched fields fieldNames = - WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn + RemoveFields.byFields tableName howMatched fields fieldNames conn /// Delete a document by its ID /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete member conn.deleteById tableName (docId: 'TKey) = - WithConn.Delete.byId tableName docId conn + Delete.byId tableName docId conn - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
/// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match member conn.deleteByFields tableName howMatched fields = - WithConn.Delete.byFields tableName howMatched fields conn + Delete.byFields tableName howMatched fields conn open System.Runtime.CompilerServices @@ -225,36 +380,66 @@ open System.Runtime.CompilerServices type SqliteConnectionCSharpExtensions = /// Execute a query that returns a list of results - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item /// A list of results for the given query [] static member inline CustomList<'TDoc>(conn, query, parameters, mapFunc: System.Func) = - WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) + Custom.List<'TDoc>(query, parameters, mapFunc, conn) + /// Execute a query that returns a JSON array of results + /// The SqliteConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + [] + static member inline CustomJsonArray(conn, query, parameters, mapFunc) = + Custom.JsonArray(query, parameters, mapFunc, conn) + + /// Execute a query, writing its results to the given PipeWriter + /// The SqliteConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + [] + static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) = + Custom.WriteJsonArray(query, parameters, writer, mapFunc, conn) + /// Execute a query that returns one or no results - /// The SqliteConnection on which 
to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found [] static member inline CustomSingle<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, query, parameters, mapFunc: System.Func) = - WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + + /// Execute a query that returns one or no JSON documents + /// The SqliteConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + static member inline CustomJsonSingle(conn, query, parameters, mapFunc) = + Custom.JsonSingle(query, parameters, mapFunc, conn) /// Execute a query that returns no results - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query [] static member inline CustomNonQuery(conn, query, parameters) = - WithConn.Custom.nonQuery query parameters conn + Custom.nonQuery query parameters conn /// Execute a query that returns a scalar value - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value @@ -262,118 +447,118 @@ type SqliteConnectionCSharpExtensions = [] static member inline CustomScalar<'T when 'T: struct>( conn, query, parameters, mapFunc: System.Func) = - WithConn.Custom.Scalar<'T>(query, parameters, mapFunc, conn) + Custom.Scalar<'T>(query, parameters, mapFunc, conn) /// Create a 
document table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table whose existence should be ensured (may include schema) [] static member inline EnsureTable(conn, name) = - WithConn.Definition.ensureTable name conn + Definition.ensureTable name conn /// Create an index on field(s) within documents in the specified table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed [] static member inline EnsureFieldIndex(conn, tableName, indexName, fields) = - WithConn.Definition.ensureFieldIndex tableName indexName fields conn + Definition.ensureFieldIndex tableName indexName fields conn /// Insert a new document - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table into which the document should be inserted (may include schema) /// The document to be inserted [] static member inline Insert<'TDoc>(conn, tableName, document: 'TDoc) = - WithConn.Document.insert<'TDoc> tableName document conn + insert<'TDoc> tableName document conn /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table into which the document should be saved (may include schema) /// The document to be saved [] static member inline Save<'TDoc>(conn, tableName, document: 'TDoc) = - WithConn.Document.save<'TDoc> tableName document conn + save<'TDoc> tableName document conn /// Count all documents in a table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The count of the documents in the table [] static member inline 
CountAll(conn, tableName) = - WithConn.Count.all tableName conn + Count.all tableName conn - /// Count matching documents using JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Count matching documents using JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The count of matching documents in the table [] static member inline CountByFields(conn, tableName, howMatched, fields) = - WithConn.Count.byFields tableName howMatched fields conn + Count.byFields tableName howMatched fields conn /// Determine if a document exists for the given ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked /// True if a document exists, false if not [] static member inline ExistsById<'TKey>(conn, tableName, docId: 'TKey) = - WithConn.Exists.byId tableName docId conn + Exists.byId tableName docId conn - /// Determine if a document exists using JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
+ /// The SqliteConnection on which to run the query /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// True if any matching documents exist, false if not [] static member inline ExistsByFields(conn, tableName, howMatched, fields) = - WithConn.Exists.byFields tableName howMatched fields conn + Exists.byFields tableName howMatched fields conn /// Retrieve all documents in the given table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// All documents from the given table [] static member inline FindAll<'TDoc>(conn, tableName) = - WithConn.Find.All<'TDoc>(tableName, conn) + Find.All<'TDoc>(tableName, conn) /// Retrieve all documents in the given table ordered by the given fields in the document - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered /// All documents from the given table, ordered by the given fields [] static member inline FindAllOrdered<'TDoc>(conn, tableName, orderFields) = - WithConn.Find.AllOrdered<'TDoc>(tableName, orderFields, conn) + Find.AllOrdered<'TDoc>(tableName, orderFields, conn) /// Retrieve a document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise [] static member inline FindById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, docId: 'TKey) = - WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) + Find.ById<'TKey, 
'TDoc>(tableName, docId, conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// All documents matching the given fields [] static member inline FindByFields<'TDoc>(conn, tableName, howMatched, fields) = - WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) + Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in /// the document /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -381,108 +566,264 @@ type SqliteConnectionCSharpExtensions = /// All documents matching the given fields, ordered by the other given fields [] static member inline FindByFieldsOrdered<'TDoc>(conn, tableName, howMatched, queryFields, orderFields) = - WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
+ /// The SqliteConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, fields) = - WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) + Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, queryFields, orderFields) = - WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + + /// Retrieve all JSON documents in the given table + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// All JSON documents from the given table + [] + static member inline JsonAll(conn, tableName) = + Json.all tableName conn + + /// Retrieve all JSON documents in the given 
table ordered by the given fields in the document + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All JSON documents from the given table, ordered by the given fields + [] + static member inline JsonAllOrdered(conn, tableName, orderFields) = + Json.allOrdered tableName orderFields conn + + /// Retrieve a JSON document by its ID + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + static member inline JsonById<'TKey>(conn, tableName, docId: 'TKey) = + Json.byId tableName docId conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + static member inline JsonByFields(conn, tableName, howMatched, fields) = + Json.byFields tableName howMatched fields conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields + /// in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + static member inline JsonByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.byFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFields(conn, tableName, howMatched, fields) = + Json.firstByFields tableName howMatched fields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first JSON document (in order) if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Write all JSON documents in the given table to the given PipeWriter + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + [] + static member inline WriteJsonAll(conn, tableName, writer) = + Json.writeAll tableName writer conn + + /// + /// Write all JSON all documents in the given table to the given PipeWriter, ordered by the given fields in + /// the document + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + [] + static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) = + Json.writeAllOrdered tableName writer orderFields conn + + /// Write a JSON document to the given PipeWriter by its ID + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + [] + static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId: 'TKey) = + Json.writeById tableName writer docId conn + + /// + /// Write JSON documents to the given 
PipeWriter matching JSON field comparisons (->> =, etc.) + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonByFields(conn, tableName, writer, howMatched, fields) = + Json.writeByFields tableName writer howMatched fields conn + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonFirstByFields(conn, tableName, writer, howMatched, fields) = + Json.writeFirstByFields tableName writer howMatched fields conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
ordered by the given fields in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn /// Update (replace) an entire document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document [] static member inline UpdateById<'TKey, 'TDoc>(conn, tableName, docId: 'TKey, document: 'TDoc) = - WithConn.Update.byId tableName docId document conn + Update.byId tableName docId document conn /// /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document [] static member inline UpdateByFunc<'TKey, 'TDoc>( conn, tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc) = - WithConn.Update.ByFunc(tableName, idFunc, document, conn) + Update.ByFunc(tableName, idFunc, document, conn) /// Patch a document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing 
document [] static member inline PatchById<'TKey, 'TPatch>(conn, tableName, docId: 'TKey, patch: 'TPatch) = - WithConn.Patch.byId tableName docId patch conn + Patch.byId tableName docId patch conn /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document [] static member inline PatchByFields<'TPatch>(conn, tableName, howMatched, fields, patch: 'TPatch) = - WithConn.Patch.byFields tableName howMatched fields patch conn + Patch.byFields tableName howMatched fields patch conn /// Remove fields from a document by the document's ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document [] static member inline RemoveFieldsById<'TKey>(conn, tableName, docId: 'TKey, fieldNames) = - WithConn.RemoveFields.byId tableName docId fieldNames conn + RemoveFields.byId tableName docId fieldNames conn /// Remove fields from documents via a comparison on JSON fields in the document - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which documents should be modified (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// One or more field names to remove from the matching documents [] static member inline RemoveFieldsByFields(conn, tableName, howMatched, fields, fieldNames) = - 
WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn + RemoveFields.byFields tableName howMatched fields fieldNames conn /// Delete a document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete [] static member inline DeleteById<'TKey>(conn, tableName, docId: 'TKey) = - WithConn.Delete.byId tableName docId conn + Delete.byId tableName docId conn - /// Delete documents by matching a JSON field comparison query (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match [] static member inline DeleteByFields(conn, tableName, howMatched, fields) = - WithConn.Delete.byFields tableName howMatched fields conn + Delete.byFields tableName howMatched fields conn diff --git a/src/Sqlite/Functions.fs b/src/Sqlite/Functions.fs new file mode 100644 index 0000000..c147f9a --- /dev/null +++ b/src/Sqlite/Functions.fs @@ -0,0 +1,636 @@ +namespace BitBadger.Documents.Sqlite + +open Microsoft.Data.Sqlite + +/// Commands to execute custom SQL queries +[] +module Custom = + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// A list of results for the given query + [] + let list<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Custom.list<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to 
use for the query + /// The mapping function between the document and the domain item + /// A list of results for the given query + let List<'TDoc>(query, parameters, mapFunc: System.Func) = + use conn = Configuration.dbConn () + WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + [] + let jsonArray query parameters mapFunc = + use conn = Configuration.dbConn () + WithConn.Custom.jsonArray query parameters mapFunc conn + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc) = + use conn = Configuration.dbConn () + WithConn.Custom.JsonArray(query, parameters, mapFunc, conn) + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + [] + let writeJsonArray query parameters writer mapFunc = + use conn = Configuration.dbConn () + WithConn.Custom.writeJsonArray query parameters writer mapFunc conn + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + let WriteJsonArray(query, parameters, writer, mapFunc) = + use conn = Configuration.dbConn () + WithConn.Custom.WriteJsonArray(query, parameters, writer, mapFunc, conn) + + /// Execute a query that returns one or no results + /// The query to retrieve 
the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// Some with the first matching result, or None if not found + [] + let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Custom.single<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The first matching result, or null if not found + let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( + query, parameters, mapFunc: System.Func) = + use conn = Configuration.dbConn () + WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc = + use conn = Configuration.dbConn () + WithConn.Custom.jsonSingle query parameters mapFunc conn + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc) = + use conn = Configuration.dbConn () + WithConn.Custom.JsonSingle(query, parameters, mapFunc, conn) + + /// Execute a query that returns no results + /// The query to retrieve the results + /// Parameters to use for the query + [] + let nonQuery query parameters = + use conn = Configuration.dbConn () + WithConn.Custom.nonQuery query parameters conn + + /// Execute a query that returns a scalar value + /// The query 
to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The scalar value for the query + [] + let scalar<'T when 'T: struct> query parameters (mapFunc: SqliteDataReader -> 'T) = + use conn = Configuration.dbConn () + WithConn.Custom.scalar<'T> query parameters mapFunc conn + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The scalar value for the query + let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func) = + use conn = Configuration.dbConn () + WithConn.Custom.Scalar<'T>(query, parameters, mapFunc, conn) + + +/// Functions to create tables and indexes +[] +module Definition = + + /// Create a document table + /// The table whose existence should be ensured (may include schema) + [] + let ensureTable name = + use conn = Configuration.dbConn () + WithConn.Definition.ensureTable name conn + + /// Create an index on field(s) within documents in the specified table + /// The table to be indexed (may include schema) + /// The name of the index to create + /// One or more fields to be indexed + [] + let ensureFieldIndex tableName indexName fields = + use conn = Configuration.dbConn () + WithConn.Definition.ensureFieldIndex tableName indexName fields conn + + +/// Document insert/save functions +[] +module Document = + + /// Insert a new document + /// The table into which the document should be inserted (may include schema) + /// The document to be inserted + [] + let insert<'TDoc> tableName (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Document.insert tableName document conn + + /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") + /// The table into which the document should be saved (may include schema) + /// The document to be saved + [] + let save<'TDoc> tableName (document: 'TDoc) = + use conn = 
Configuration.dbConn () + WithConn.Document.save tableName document conn + + +/// Commands to count documents +[] +module Count = + + /// Count all documents in a table + /// The table in which documents should be counted (may include schema) + /// The count of the documents in the table + [] + let all tableName = + use conn = Configuration.dbConn () + WithConn.Count.all tableName conn + + /// Count matching documents using JSON field comparisons (->> =, etc.) + /// The table in which documents should be counted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The count of matching documents in the table + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Count.byFields tableName howMatched fields conn + + +/// Commands to determine if documents exist +[] +module Exists = + + /// Determine if a document exists for the given ID + /// The table in which existence should be checked (may include schema) + /// The ID of the document whose existence should be checked + /// True if a document exists, false if not + [] + let byId tableName (docId: 'TKey) = + use conn = Configuration.dbConn () + WithConn.Exists.byId tableName docId conn + + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
+ /// The table in which existence should be checked (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// True if any matching documents exist, false if not + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Exists.byFields tableName howMatched fields conn + + +/// Commands to retrieve documents +[] +module Find = + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table + [] + let all<'TDoc> tableName = + use conn = Configuration.dbConn () + WithConn.Find.all<'TDoc> tableName conn + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table + let All<'TDoc> tableName = + use conn = Configuration.dbConn () + WithConn.Find.All<'TDoc>(tableName, conn) + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table, ordered by the given fields + [] + let allOrdered<'TDoc> tableName orderFields = + use conn = Configuration.dbConn () + WithConn.Find.allOrdered<'TDoc> tableName orderFields conn + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table, ordered by the given fields + let AllOrdered<'TDoc> tableName orderFields = + use conn = Configuration.dbConn () + WithConn.Find.AllOrdered<'TDoc>(tableName, orderFields, conn) + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + 
/// The ID of the document to retrieve + /// Some with the document if found, None otherwise + [] + let byId<'TKey, 'TDoc> tableName docId = + use conn = Configuration.dbConn () + WithConn.Find.byId<'TKey, 'TDoc> tableName docId conn + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The document if found, null otherwise + let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId) = + use conn = Configuration.dbConn () + WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All documents matching the given fields + [] + let byFields<'TDoc> tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Find.byFields<'TDoc> tableName howMatched fields conn + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All documents matching the given fields + let ByFields<'TDoc>(tableName, howMatched, fields) = + use conn = Configuration.dbConn () + WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All documents matching the given fields, ordered by the other given fields + let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) = + use conn = Configuration.dbConn () + WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Some with the first document, or None if not found + [] + let firstByFields<'TDoc> tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
+ /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first document, or null if not found + let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = + use conn = Configuration.dbConn () + WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// + /// Some with the first document ordered by the given fields, or None if not found + /// + [] + let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first document ordered by the given fields, or null if not found + let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( + tableName, howMatched, queryFields, orderFields) = + use conn = Configuration.dbConn () + WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + + +/// Commands to retrieve documents as raw JSON +[] +module Json = + + /// Retrieve all JSON documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All JSON documents from the given table + [] + let all tableName = + use conn = Configuration.dbConn () + WithConn.Json.all tableName conn + + /// Retrieve all JSON documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All JSON documents from the given table, ordered by the given fields + [] + let allOrdered tableName orderFields = + use conn = Configuration.dbConn () + WithConn.Json.allOrdered tableName orderFields conn + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) = + use conn = Configuration.dbConn () + WithConn.Json.byId tableName docId conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
+ /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.byFields tableName howMatched fields conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Json.byFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.firstByFields tableName howMatched fields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first JSON document (in order) if found, an empty JSON document otherwise + [] + let firstByFieldsOrdered tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Write all JSON documents in the given table to the given PipeWriter + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + [] + let writeAll tableName writer = + use conn = Configuration.dbConn () + WithConn.Json.writeAll tableName writer conn + + /// + /// Write all JSON all documents in the given table to the given PipeWriter, ordered by the given fields in + /// the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + [] + let writeAllOrdered tableName writer orderFields = + use conn = Configuration.dbConn () + WithConn.Json.writeAllOrdered tableName writer orderFields conn + + /// Write a JSON document to the given PipeWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + [] + let writeById<'TKey> tableName writer (docId: 'TKey) = + use conn = Configuration.dbConn () + WithConn.Json.writeById tableName writer docId conn + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) 
+ /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeByFields tableName writer howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.writeByFields tableName writer howMatched fields conn + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeFirstByFields tableName writer howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.writeFirstByFields tableName writer howMatched fields conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + +/// Commands to update documents +[] +module Update = + + /// Update (replace) an entire document by its ID + /// The table in which a document should be updated (may include schema) + /// The ID of the document to be updated (replaced) + /// The new document + [] + let byId tableName (docId: 'TKey) (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Update.byId tableName docId document conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + [] + let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Update.byFunc tableName idFunc document conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Update.ByFunc(tableName, idFunc, document, conn) + + +/// Commands to patch (partially update) documents +[] +module Patch = + + /// Patch a document by its ID + /// The 
table in which a document should be patched (may include schema) + /// The ID of the document to patch + /// The partial document to patch the existing document + [] + let byId tableName (docId: 'TKey) (patch: 'TPatch) = + use conn = Configuration.dbConn () + WithConn.Patch.byId tableName docId patch conn + + /// + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// + /// The table in which documents should be patched (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The partial document to patch the existing document + [] + let byFields tableName howMatched fields (patch: 'TPatch) = + use conn = Configuration.dbConn () + WithConn.Patch.byFields tableName howMatched fields patch conn + + +/// Commands to remove fields from documents +[] +module RemoveFields = + + /// Remove fields from a document by the document's ID + /// The table in which a document should be modified (may include schema) + /// The ID of the document to modify + /// One or more field names to remove from the document + [] + let byId tableName (docId: 'TKey) fieldNames = + use conn = Configuration.dbConn () + WithConn.RemoveFields.byId tableName docId fieldNames conn + + /// Remove fields from documents via a comparison on JSON fields in the document + /// The table in which documents should be modified (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// One or more field names to remove from the matching documents + [] + let byFields tableName howMatched fields fieldNames = + use conn = Configuration.dbConn () + WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn + + +/// Commands to delete documents +[] +module Delete = + + /// Delete a document by its ID + /// The table in which a document should be deleted (may include schema) + /// The ID of the document to delete + [] + let byId 
tableName (docId: 'TKey) = + use conn = Configuration.dbConn () + WithConn.Delete.byId tableName docId conn + + /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// The table in which documents should be deleted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Delete.byFields tableName howMatched fields conn diff --git a/src/Sqlite/Library.fs b/src/Sqlite/Library.fs index 215a62b..0b44915 100644 --- a/src/Sqlite/Library.fs +++ b/src/Sqlite/Library.fs @@ -1,6 +1,5 @@ namespace BitBadger.Documents.Sqlite -open BitBadger.Documents open Microsoft.Data.Sqlite /// Configuration for document handling @@ -32,16 +31,16 @@ module Configuration = | None -> invalidOp "Please provide a connection string before attempting data access" +open BitBadger.Documents + /// Query definitions [] module Query = - /// - /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document - /// + /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document /// How the fields should be matched /// The fields for the comparisons - /// A WHERE clause implementing the comparisons for the given fields + /// A WHERE clause implementing the comparisons for the given fields [] let whereByFields (howMatched: FieldMatch) fields = let name = ParameterName() @@ -63,21 +62,21 @@ module Query = | _ -> $"{it.Path SQLite AsSql} {it.Comparison.OpSql} {name.Derive it.ParameterName}") |> String.concat $" {howMatched} " - /// Create a WHERE clause fragment to implement an ID-based query + /// Create a WHERE clause fragment to implement an ID-based query /// The ID of the document - /// A WHERE clause fragment identifying a document by its ID + /// A WHERE clause fragment identifying a document by its ID [] let whereById (docId: 'TKey) = whereByFields Any [ { Field.Equal 
(Configuration.idField ()) docId with ParameterName = Some "@id" } ] - /// Create an UPDATE statement to patch documents + /// Create an UPDATE statement to patch documents /// The table to be updated /// A query to patch documents [] let patch tableName = $"UPDATE %s{tableName} SET data = json_patch(data, json(@data))" - /// Create an UPDATE statement to remove fields from documents + /// Create an UPDATE statement to remove fields from documents /// The table to be updated /// The parameters with the field names to be removed /// A query to remove fields from documents @@ -136,7 +135,7 @@ module Parameters = SqliteParameter(name, Configuration.serializer().Serialize it) /// Create JSON field parameters - /// The Fields to convert to parameters + /// The Fields to convert to parameters /// The current parameters for the query /// A unified sequence of parameter names and values [] @@ -169,7 +168,7 @@ module Parameters = /// Append JSON field name parameters for the given field names to the given parameters /// The name of the parameter to use for each field /// The names of fields to be addressed - /// The name (@name) and parameter value for the field names + /// The name (@name) and parameter value for the field names [] let fieldNameParams paramName fieldNames = fieldNames @@ -183,20 +182,22 @@ module Parameters = Seq.empty +open System.Text + /// Helper functions for handling results [] module Results = /// Create a domain item from a document, specifying the field in which the document is found /// The field name containing the JSON document - /// A SqliteDataReader set to the row with the document to be constructed + /// A SqliteDataReader set to the row with the document to be constructed /// The constructed domain item [] let fromDocument<'TDoc> field (rdr: SqliteDataReader) : 'TDoc = Configuration.serializer().Deserialize<'TDoc>(rdr.GetString(rdr.GetOrdinal field)) /// Create a domain item from a document - /// A SqliteDataReader set to the row with the 
document to be constructed + /// A SqliteDataReader set to the row with the document to be constructed /// The constructed domain item [] let fromData<'TDoc> rdr = @@ -232,20 +233,82 @@ module Results = } /// Extract a count from the first column - /// A SqliteDataReader set to the row with the count to retrieve + /// A SqliteDataReader set to the row with the count to retrieve /// The count from the row [] let toCount (rdr: SqliteDataReader) = rdr.GetInt64 0 /// Extract a true/false value from the first column - /// A SqliteDataReader set to the row with the true/false value to retrieve + /// A SqliteDataReader set to the row with the true/false value to retrieve /// The true/false value from the row /// SQLite implements boolean as 1 = true, 0 = false [] let toExists rdr = toCount rdr > 0L + /// Retrieve a JSON document, specifying the field in which the document is found + /// The field name containing the JSON document + /// A SqliteDataReader set to the row with the document to be constructed + /// The JSON document (an empty JSON document if not found) + [] + let jsonFromDocument field (rdr: SqliteDataReader) = + try + let idx = rdr.GetOrdinal field + if rdr.IsDBNull idx then "{}" else rdr.GetString idx + with :? System.IndexOutOfRangeException -> "{}" + + /// Retrieve a JSON document + /// A SqliteDataReader set to the row with the document to be constructed + /// The JSON document (an empty JSON document if not found) + [] + let jsonFromData rdr = + jsonFromDocument "data" rdr + + /// + /// Create a JSON array for the results of the given command, using the specified mapping function + /// + /// The command to execute + /// The mapping function to extract JSON from the query's results + /// A JSON array of items from the reader + [] + let toJsonArray (cmd: SqliteCommand) (mapFunc: SqliteDataReader -> string) = backgroundTask { + use! rdr = cmd.ExecuteReaderAsync() + let it = StringBuilder "[" + while! 
rdr.ReadAsync() do + if it.Length > 2 then ignore (it.Append ",") + it.Append(mapFunc rdr) |> ignore + return it.Append("]").ToString() + } + + /// + /// Create a JSON array for the results of the given command, using the specified mapping function + /// + /// The command to execute + /// The mapping function to extract JSON from the query's results + /// A JSON array of items from the reader + let ToJsonArray (cmd: SqliteCommand) (mapFunc: System.Func) = + toJsonArray cmd mapFunc.Invoke + + /// Write a JSON array of items for the results of a query to the given StreamWriter + /// The command to execute + /// The StreamWriter to which results should be written + /// The mapping function to extract JSON from the query's results + [] + let writeJsonArray (cmd: SqliteCommand) writer (mapFunc: SqliteDataReader -> string) = backgroundTask { + use! rdr = cmd.ExecuteReaderAsync() + return + seq { while rdr.Read() do yield mapFunc rdr } + |> PipeWriter.writeStrings writer + } + + /// Write a JSON array of items for the results of a query to the given StreamWriter + /// The command to execute + /// The StreamWriter to which results should be written + /// The mapping function to extract JSON from the query's results + let WriteJsonArray (cmd: SqliteCommand) writer (mapFunc: System.Func) = + writeJsonArray cmd writer mapFunc.Invoke + [] module internal Helpers = @@ -256,928 +319,3 @@ module internal Helpers = let! 
_ = cmd.ExecuteNonQueryAsync() () } - - -/// Versions of queries that accept a SqliteConnection as the last parameter -module WithConn = - - /// Commands to execute custom SQL queries - [] - module Custom = - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// A list of results for the given query - [] - let list<'TDoc> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'TDoc) - (conn: SqliteConnection) = - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - toCustomList<'TDoc> cmd mapFunc - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// A list of results for the given query - let List<'TDoc>( - query, parameters: SqliteParameter seq, mapFunc: System.Func, - conn: SqliteConnection - ) = - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - ToCustomList<'TDoc>(cmd, mapFunc) - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// Some with the first matching result, or None if not found - [] - let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) conn = backgroundTask { - let! 
results = list query parameters mapFunc conn - return FSharp.Collections.List.tryHead results - } - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// The first matching result, or null if not found - let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( - query, parameters, mapFunc: System.Func, conn - ) = backgroundTask { - let! result = single<'TDoc> query parameters mapFunc.Invoke conn - return Option.toObj result - } - - /// Execute a query that returns no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The SqliteConnection to use to execute the query - [] - let nonQuery query (parameters: SqliteParameter seq) (conn: SqliteConnection) = - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - write cmd - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The SqliteConnection to use to execute the query - /// The scalar value for the query - [] - let scalar<'T when 'T : struct> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'T) - (conn: SqliteConnection) = backgroundTask { - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - use! rdr = cmd.ExecuteReaderAsync() - let! 
isFound = rdr.ReadAsync() - return if isFound then mapFunc rdr else Unchecked.defaultof<'T> - } - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The SqliteConnection to use to execute the query - /// The scalar value for the query - let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, conn) = - scalar<'T> query parameters mapFunc.Invoke conn - - /// Functions to create tables and indexes - [] - module Definition = - - /// Create a document table - /// The table whose existence should be ensured (may include schema) - /// The SqliteConnection to use to execute the query - [] - let ensureTable name conn = backgroundTask { - do! Custom.nonQuery (Query.Definition.ensureTable name) [] conn - do! Custom.nonQuery (Query.Definition.ensureKey name SQLite) [] conn - } - - /// Create an index on field(s) within documents in the specified table - /// The table to be indexed (may include schema) - /// The name of the index to create - /// One or more fields to be indexed - /// The SqliteConnection to use to execute the query - [] - let ensureFieldIndex tableName indexName fields conn = - Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields SQLite) [] conn - - /// Commands to add documents - [] - module Document = - - /// Insert a new document - /// The table into which the document should be inserted (may include schema) - /// The document to be inserted - /// The SqliteConnection to use to execute the query - [] - let insert<'TDoc> tableName (document: 'TDoc) conn = - let query = - match Configuration.autoIdStrategy () with - | Disabled -> Query.insert tableName - | strategy -> - let idField = Configuration.idField () - let dataParam = - if AutoId.NeedsAutoId strategy document idField then - match strategy with - | Number -> $"(SELECT coalesce(max(data->>'{idField}'), 0) + 1 FROM {tableName})" - | Guid -> 
$"'{AutoId.GenerateGuid()}'" - | RandomString -> $"'{AutoId.GenerateRandomString(Configuration.idStringLength ())}'" - | Disabled -> "@data" - |> function it -> $"json_set(@data, '$.{idField}', {it})" - else "@data" - (Query.insert tableName).Replace("@data", dataParam) - Custom.nonQuery query [ jsonParam "@data" document ] conn - - /// - /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// - /// The table into which the document should be saved (may include schema) - /// The document to be saved - /// The SqliteConnection to use to execute the query - [] - let save<'TDoc> tableName (document: 'TDoc) conn = - Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] conn - - /// Commands to count documents - [] - module Count = - - /// Count all documents in a table - /// The table in which documents should be counted (may include schema) - /// The SqliteConnection to use to execute the query - /// The count of the documents in the table - [] - let all tableName conn = - Custom.scalar (Query.count tableName) [] toCount conn - - /// Count matching documents using JSON field comparisons (->> =, etc.) 
- /// The table in which documents should be counted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// The count of matching documents in the table - [] - let byFields tableName howMatched fields conn = - Custom.scalar - (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount conn - - /// Commands to determine if documents exist - [] - module Exists = - - /// Determine if a document exists for the given ID - /// The table in which existence should be checked (may include schema) - /// The ID of the document whose existence should be checked - /// The SqliteConnection to use to execute the query - /// True if a document exists, false if not - [] - let byId tableName (docId: 'TKey) conn = - Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists conn - - /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
- /// The table in which existence should be checked (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// True if any matching documents exist, false if not - [] - let byFields tableName howMatched fields conn = - Custom.scalar - (Query.exists tableName (Query.whereByFields howMatched fields)) - (addFieldParams fields []) - toExists - conn - - /// Commands to retrieve documents - [] - module Find = - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// The SqliteConnection to use to execute the query - /// All documents from the given table - [] - let all<'TDoc> tableName conn = - Custom.list<'TDoc> (Query.find tableName) [] fromData<'TDoc> conn - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// The SqliteConnection to use to execute the query - /// All documents from the given table - let All<'TDoc>(tableName, conn) = - Custom.List(Query.find tableName, [], fromData<'TDoc>, conn) - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents from the given table, ordered by the given fields - [] - let allOrdered<'TDoc> tableName orderFields conn = - Custom.list<'TDoc> (Query.find tableName + Query.orderBy orderFields SQLite) [] fromData<'TDoc> conn - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents from the 
given table, ordered by the given fields - let AllOrdered<'TDoc>(tableName, orderFields, conn) = - Custom.List(Query.find tableName + Query.orderBy orderFields SQLite, [], fromData<'TDoc>, conn) - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// The SqliteConnection to use to execute the query - /// Some with the document if found, None otherwise - [] - let byId<'TKey, 'TDoc> tableName (docId: 'TKey) conn = - Custom.single<'TDoc> (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> conn - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// The SqliteConnection to use to execute the query - /// The document if found, null otherwise - let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, conn) = - Custom.Single<'TDoc>(Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, conn) - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields - [] - let byFields<'TDoc> tableName howMatched fields conn = - Custom.list<'TDoc> - (Query.byFields (Query.find tableName) howMatched fields) - (addFieldParams fields []) - fromData<'TDoc> - conn - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
- /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields - let ByFields<'TDoc>(tableName, howMatched, fields, conn) = - Custom.List<'TDoc>( - Query.byFields (Query.find tableName) howMatched fields, - addFieldParams fields [], - fromData<'TDoc>, - conn) - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields, ordered by the other given fields - [] - let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = - Custom.list<'TDoc> - (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) - (addFieldParams queryFields []) - fromData<'TDoc> - conn - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields - /// in the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields, ordered by the other given fields - let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) = - Custom.List<'TDoc>( - Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite, - addFieldParams queryFields [], - fromData<'TDoc>, - conn) - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// Some with the first document, or None if not found - [] - let firstByFields<'TDoc> tableName howMatched fields conn = - Custom.single - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1" - (addFieldParams fields []) - fromData<'TDoc> - conn - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// The first document, or null if not found - let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, conn) = - Custom.Single( - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", - addFieldParams fields [], - fromData<'TDoc>, - conn) - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the - /// given fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// - [] - let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = - Custom.single - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1" - (addFieldParams queryFields []) - fromData<'TDoc> - conn - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the - /// given fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// The first document ordered by the given fields, or null if not found - let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( - tableName, howMatched, queryFields, orderFields, conn) = - Custom.Single( - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1", - addFieldParams queryFields [], - fromData<'TDoc>, - conn) - - /// Commands to update documents - [] - module Update = - - /// Update (replace) an entire document by its ID - /// The table in which a document should be updated (may include schema) - /// The ID of the document to be updated (replaced) - /// The new document - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) (document: 'TDoc) conn = - Custom.nonQuery - (Query.statementWhere (Query.update tableName) (Query.whereById 
docId)) - [ idParam docId; jsonParam "@data" document ] - conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the - /// document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - /// The SqliteConnection to use to execute the query - [] - let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) conn = - byId tableName (idFunc document) document conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the - /// document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - /// The SqliteConnection to use to execute the query - let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, conn) = - byFunc tableName idFunc.Invoke document conn - - /// Commands to patch (partially update) documents - [] - module Patch = - - /// Patch a document by its ID - /// The table in which a document should be patched (may include schema) - /// The ID of the document to patch - /// The partial document to patch the existing document - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) (patch: 'TPatch) conn = - Custom.nonQuery - (Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] conn - - /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, - /// etc.) 
- /// - /// The table in which documents should be patched (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The partial document to patch the existing document - /// The SqliteConnection to use to execute the query - [] - let byFields tableName howMatched fields (patch: 'TPatch) conn = - Custom.nonQuery - (Query.byFields (Query.patch tableName) howMatched fields) - (addFieldParams fields [ jsonParam "@data" patch ]) - conn - - /// Commands to remove fields from documents - [] - module RemoveFields = - - /// Remove fields from a document by the document's ID - /// The table in which a document should be modified (may include schema) - /// The ID of the document to modify - /// One or more field names to remove from the document - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) fieldNames conn = - let nameParams = fieldNameParams "@name" fieldNames - Custom.nonQuery - (Query.byId (Query.removeFields tableName nameParams) docId) - (idParam docId |> Seq.singleton |> Seq.append nameParams) - conn - - /// Remove fields from documents via a comparison on JSON fields in the document - /// The table in which documents should be modified (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// One or more field names to remove from the matching documents - /// The SqliteConnection to use to execute the query - [] - let byFields tableName howMatched fields fieldNames conn = - let nameParams = fieldNameParams "@name" fieldNames - Custom.nonQuery - (Query.byFields (Query.removeFields tableName nameParams) howMatched fields) - (addFieldParams fields nameParams) - conn - - /// Commands to delete documents - [] - module Delete = - - /// Delete a document by its ID - /// The table in which a document should be deleted (may include schema) - /// The ID of the document to delete - /// The SqliteConnection 
to use to execute the query - [] - let byId tableName (docId: 'TKey) conn = - Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] conn - - /// Delete documents by matching a JSON field comparison query (->> =, etc.) - /// The table in which documents should be deleted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - [] - let byFields tableName howMatched fields conn = - Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) conn - - -/// Commands to execute custom SQL queries -[] -module Custom = - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// A list of results for the given query - [] - let list<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Custom.list<'TDoc> query parameters mapFunc conn - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// A list of results for the given query - let List<'TDoc>(query, parameters, mapFunc: System.Func) = - use conn = Configuration.dbConn () - WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found - [] - let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Custom.single<'TDoc> query parameters mapFunc conn - - /// Execute a query 
that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The first matching result, or null if not found - let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( - query, parameters, mapFunc: System.Func) = - use conn = Configuration.dbConn () - WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) - - /// Execute a query that returns no results - /// The query to retrieve the results - /// Parameters to use for the query - [] - let nonQuery query parameters = - use conn = Configuration.dbConn () - WithConn.Custom.nonQuery query parameters conn - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The scalar value for the query - [] - let scalar<'T when 'T: struct> query parameters (mapFunc: SqliteDataReader -> 'T) = - use conn = Configuration.dbConn () - WithConn.Custom.scalar<'T> query parameters mapFunc conn - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The scalar value for the query - let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func) = - use conn = Configuration.dbConn () - WithConn.Custom.Scalar<'T>(query, parameters, mapFunc, conn) - - -/// Functions to create tables and indexes -[] -module Definition = - - /// Create a document table - /// The table whose existence should be ensured (may include schema) - [] - let ensureTable name = - use conn = Configuration.dbConn () - WithConn.Definition.ensureTable name conn - - /// Create an index on field(s) within documents in the specified table - /// The table to be indexed (may include schema) - /// The name of the index to create - /// One or more fields to be indexed - [] - let ensureFieldIndex 
tableName indexName fields = - use conn = Configuration.dbConn () - WithConn.Definition.ensureFieldIndex tableName indexName fields conn - - -/// Document insert/save functions -[] -module Document = - - /// Insert a new document - /// The table into which the document should be inserted (may include schema) - /// The document to be inserted - [] - let insert<'TDoc> tableName (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Document.insert tableName document conn - - /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// The table into which the document should be saved (may include schema) - /// The document to be saved - [] - let save<'TDoc> tableName (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Document.save tableName document conn - - -/// Commands to count documents -[] -module Count = - - /// Count all documents in a table - /// The table in which documents should be counted (may include schema) - /// The count of the documents in the table - [] - let all tableName = - use conn = Configuration.dbConn () - WithConn.Count.all tableName conn - - /// Count matching documents using JSON field comparisons (->> =, etc.) 
- /// The table in which documents should be counted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The count of matching documents in the table - [] - let byFields tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Count.byFields tableName howMatched fields conn - - -/// Commands to determine if documents exist -[] -module Exists = - - /// Determine if a document exists for the given ID - /// The table in which existence should be checked (may include schema) - /// The ID of the document whose existence should be checked - /// True if a document exists, false if not - [] - let byId tableName (docId: 'TKey) = - use conn = Configuration.dbConn () - WithConn.Exists.byId tableName docId conn - - /// Determine if a document exists using JSON field comparisons (->> =, etc.) - /// The table in which existence should be checked (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// True if any matching documents exist, false if not - [] - let byFields tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Exists.byFields tableName howMatched fields conn - - -/// Commands to retrieve documents -[] -module Find = - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// All documents from the given table - [] - let all<'TDoc> tableName = - use conn = Configuration.dbConn () - WithConn.Find.all<'TDoc> tableName conn - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// All documents from the given table - let All<'TDoc> tableName = - use conn = Configuration.dbConn () - WithConn.Find.All<'TDoc>(tableName, conn) - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which 
documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// All documents from the given table, ordered by the given fields - [] - let allOrdered<'TDoc> tableName orderFields = - use conn = Configuration.dbConn () - WithConn.Find.allOrdered<'TDoc> tableName orderFields conn - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// All documents from the given table, ordered by the given fields - let AllOrdered<'TDoc> tableName orderFields = - use conn = Configuration.dbConn () - WithConn.Find.AllOrdered<'TDoc>(tableName, orderFields, conn) - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// Some with the document if found, None otherwise - [] - let byId<'TKey, 'TDoc> tableName docId = - use conn = Configuration.dbConn () - WithConn.Find.byId<'TKey, 'TDoc> tableName docId conn - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// The document if found, null otherwise - let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId) = - use conn = Configuration.dbConn () - WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
- /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// All documents matching the given fields - [] - let byFields<'TDoc> tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Find.byFields<'TDoc> tableName howMatched fields conn - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// All documents matching the given fields - let ByFields<'TDoc>(tableName, howMatched, fields) = - use conn = Configuration.dbConn () - WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// All documents matching the given fields, ordered by the other given fields - [] - let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - use conn = Configuration.dbConn () - WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in - /// the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// All documents matching the given fields, ordered by the other given fields - let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) = - use conn = Configuration.dbConn () - WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Some with the first document, or None if not found - [] - let firstByFields<'TDoc> tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The first document, or null if not found - let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = - use conn = Configuration.dbConn () - WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given - /// fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// - [] - let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - use conn = Configuration.dbConn () - WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given - /// fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found - let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( - tableName, howMatched, queryFields, orderFields) = - use conn = Configuration.dbConn () - WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - - -/// Commands to update documents -[] -module Update = - - /// Update (replace) an entire document by its ID - /// The table in which a document should be updated (may include schema) - /// The ID of the document to be updated (replaced) - /// The new document - [] - let byId tableName (docId: 'TKey) (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Update.byId tableName docId document conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - 
[] - let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Update.byFunc tableName idFunc document conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Update.ByFunc(tableName, idFunc, document, conn) - - -/// Commands to patch (partially update) documents -[] -module Patch = - - /// Patch a document by its ID - /// The table in which a document should be patched (may include schema) - /// The ID of the document to patch - /// The partial document to patch the existing document - [] - let byId tableName (docId: 'TKey) (patch: 'TPatch) = - use conn = Configuration.dbConn () - WithConn.Patch.byId tableName docId patch conn - - /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
- /// - /// The table in which documents should be patched (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The partial document to patch the existing document - [] - let byFields tableName howMatched fields (patch: 'TPatch) = - use conn = Configuration.dbConn () - WithConn.Patch.byFields tableName howMatched fields patch conn - - -/// Commands to remove fields from documents -[] -module RemoveFields = - - /// Remove fields from a document by the document's ID - /// The table in which a document should be modified (may include schema) - /// The ID of the document to modify - /// One or more field names to remove from the document - [] - let byId tableName (docId: 'TKey) fieldNames = - use conn = Configuration.dbConn () - WithConn.RemoveFields.byId tableName docId fieldNames conn - - /// Remove fields from documents via a comparison on JSON fields in the document - /// The table in which documents should be modified (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// One or more field names to remove from the matching documents - [] - let byFields tableName howMatched fields fieldNames = - use conn = Configuration.dbConn () - WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn - - -/// Commands to delete documents -[] -module Delete = - - /// Delete a document by its ID - /// The table in which a document should be deleted (may include schema) - /// The ID of the document to delete - [] - let byId tableName (docId: 'TKey) = - use conn = Configuration.dbConn () - WithConn.Delete.byId tableName docId conn - - /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
- /// The table in which documents should be deleted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - [] - let byFields tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Delete.byFields tableName howMatched fields conn diff --git a/src/Sqlite/README.md b/src/Sqlite/README.md index fdd8a46..7c679f8 100644 --- a/src/Sqlite/README.md +++ b/src/Sqlite/README.md @@ -13,7 +13,7 @@ This package provides a lightweight document library backed by [SQLite](https:// ## Upgrading from v3 -There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://bitbadger.solutions/open-source/relational-documents/upgrade-from-v3-to-v4.html) for full details. +There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://relationaldocs.bitbadger.solutions/dotnet/upgrade/v4.html) for full details. ## Getting Started @@ -103,4 +103,4 @@ do! Delete.byFields "customer" Any [ Field.Equal "City" "Chicago" ] ## More Information -The [project site](https://bitbadger.solutions/open-source/relational-documents/) has full details on how to use this library. +The [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has full details on how to use this library. 
diff --git a/src/Sqlite/WithConn.fs b/src/Sqlite/WithConn.fs new file mode 100644 index 0000000..d0deeb9 --- /dev/null +++ b/src/Sqlite/WithConn.fs @@ -0,0 +1,784 @@ +/// Versions of queries that accept a SqliteConnection as the last parameter +module BitBadger.Documents.Sqlite.WithConn + +open BitBadger.Documents +open Microsoft.Data.Sqlite + +/// Commands to execute custom SQL queries +[] +module Custom = + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// A list of results for the given query + [] + let list<'TDoc> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'TDoc) + (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + toCustomList<'TDoc> cmd mapFunc + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// A list of results for the given query + let List<'TDoc>( + query, parameters: SqliteParameter seq, mapFunc: System.Func, + conn: SqliteConnection + ) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + ToCustomList<'TDoc>(cmd, mapFunc) + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// A JSON array of results for the given query + [] + let jsonArray + query + (parameters: SqliteParameter seq) + (mapFunc: SqliteDataReader -> string) + (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText 
<- query + cmd.Parameters.AddRange parameters + toJsonArray cmd mapFunc + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc: System.Func, conn) = + jsonArray query parameters mapFunc.Invoke conn + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + [] + let writeJsonArray + query + (parameters: SqliteParameter seq) + writer + (mapFunc: SqliteDataReader -> string) + (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + writeJsonArray cmd writer mapFunc + + /// Execute a query, writing its results to the given PipeWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The PipeWriter to which the results should be written + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, conn) = + writeJsonArray query parameters writer mapFunc.Invoke conn + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// Some with the first matching result, or None if not found + [] + let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) conn = backgroundTask { + let! 
results = list query parameters mapFunc conn + return FSharp.Collections.List.tryHead results + } + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// The first matching result, or null if not found + let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( + query, parameters, mapFunc: System.Func, conn + ) = backgroundTask { + let! result = single<'TDoc> query parameters mapFunc.Invoke conn + return Option.toObj result + } + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc conn = backgroundTask { + let! 
results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc conn + return if results = "[]" then "{}" else results[1..results.Length - 2] + } + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc: System.Func, conn) = + jsonSingle query parameters mapFunc.Invoke conn + + /// Execute a query that returns no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The SqliteConnection to use to execute the query + [] + let nonQuery query (parameters: SqliteParameter seq) (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + write cmd + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The SqliteConnection to use to execute the query + /// The scalar value for the query + [] + let scalar<'T when 'T : struct> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'T) + (conn: SqliteConnection) = backgroundTask { + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + use! rdr = cmd.ExecuteReaderAsync() + let! 
isFound = rdr.ReadAsync() + return if isFound then mapFunc rdr else Unchecked.defaultof<'T> + } + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The SqliteConnection to use to execute the query + /// The scalar value for the query + let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, conn) = + scalar<'T> query parameters mapFunc.Invoke conn + + +/// Functions to create tables and indexes +[] +module Definition = + + /// Create a document table + /// The table whose existence should be ensured (may include schema) + /// The SqliteConnection to use to execute the query + [] + let ensureTable name conn = backgroundTask { + do! Custom.nonQuery (Query.Definition.ensureTable name) [] conn + do! Custom.nonQuery (Query.Definition.ensureKey name SQLite) [] conn + } + + /// Create an index on field(s) within documents in the specified table + /// The table to be indexed (may include schema) + /// The name of the index to create + /// One or more fields to be indexed + /// The SqliteConnection to use to execute the query + [] + let ensureFieldIndex tableName indexName fields conn = + Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields SQLite) [] conn + +/// Commands to add documents +[] +module Document = + + /// Insert a new document + /// The table into which the document should be inserted (may include schema) + /// The document to be inserted + /// The SqliteConnection to use to execute the query + [] + let insert<'TDoc> tableName (document: 'TDoc) conn = + let query = + match Configuration.autoIdStrategy () with + | Disabled -> Query.insert tableName + | strategy -> + let idField = Configuration.idField () + let dataParam = + if AutoId.NeedsAutoId strategy document idField then + match strategy with + | Number -> $"(SELECT coalesce(max(data->>'{idField}'), 0) + 1 FROM {tableName})" + | Guid -> 
$"'{AutoId.GenerateGuid()}'" + | RandomString -> $"'{AutoId.GenerateRandomString(Configuration.idStringLength ())}'" + | Disabled -> "@data" + |> function it -> $"json_set(@data, '$.{idField}', {it})" + else "@data" + (Query.insert tableName).Replace("@data", dataParam) + Custom.nonQuery query [ jsonParam "@data" document ] conn + + /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") + /// The table into which the document should be saved (may include schema) + /// The document to be saved + /// The SqliteConnection to use to execute the query + [] + let save<'TDoc> tableName (document: 'TDoc) conn = + Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] conn + + +/// Commands to count documents +[] +module Count = + + /// Count all documents in a table + /// The table in which documents should be counted (may include schema) + /// The SqliteConnection to use to execute the query + /// The count of the documents in the table + [] + let all tableName conn = + Custom.scalar (Query.count tableName) [] toCount conn + + /// Count matching documents using JSON field comparisons (->> =, etc.) 
+ /// The table in which documents should be counted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The count of matching documents in the table + [] + let byFields tableName howMatched fields conn = + Custom.scalar + (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount conn + + +/// Commands to determine if documents exist +[] +module Exists = + + /// Determine if a document exists for the given ID + /// The table in which existence should be checked (may include schema) + /// The ID of the document whose existence should be checked + /// The SqliteConnection to use to execute the query + /// True if a document exists, false if not + [] + let byId tableName (docId: 'TKey) conn = + Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists conn + + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
+ /// The table in which existence should be checked (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// True if any matching documents exist, false if not + [] + let byFields tableName howMatched fields conn = + Custom.scalar + (Query.exists tableName (Query.whereByFields howMatched fields)) + (addFieldParams fields []) + toExists + conn + + +/// Commands to retrieve documents as domain items +[] +module Find = + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// The SqliteConnection to use to execute the query + /// All documents from the given table + [] + let all<'TDoc> tableName conn = + Custom.list<'TDoc> (Query.find tableName) [] fromData<'TDoc> conn + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// The SqliteConnection to use to execute the query + /// All documents from the given table + let All<'TDoc>(tableName, conn) = + Custom.List(Query.find tableName, [], fromData<'TDoc>, conn) + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents from the given table, ordered by the given fields + [] + let allOrdered<'TDoc> tableName orderFields conn = + Custom.list<'TDoc> (Query.find tableName + Query.orderBy orderFields SQLite) [] fromData<'TDoc> conn + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All 
documents from the given table, ordered by the given fields + let AllOrdered<'TDoc>(tableName, orderFields, conn) = + Custom.List(Query.find tableName + Query.orderBy orderFields SQLite, [], fromData<'TDoc>, conn) + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqliteConnection to use to execute the query + /// Some with the document if found, None otherwise + [] + let byId<'TKey, 'TDoc> tableName (docId: 'TKey) conn = + Custom.single<'TDoc> (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> conn + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqliteConnection to use to execute the query + /// The document if found, null otherwise + let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, conn) = + Custom.Single<'TDoc>(Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, conn) + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields + [] + let byFields<'TDoc> tableName howMatched fields conn = + Custom.list<'TDoc> + (Query.byFields (Query.find tableName) howMatched fields) + (addFieldParams fields []) + fromData<'TDoc> + conn + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
+ /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields + let ByFields<'TDoc>(tableName, howMatched, fields, conn) = + Custom.List<'TDoc>( + Query.byFields (Query.find tableName) howMatched fields, + addFieldParams fields [], + fromData<'TDoc>, + conn) + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = + Custom.list<'TDoc> + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + fromData<'TDoc> + conn + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields, ordered by the other given fields + let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) = + Custom.List<'TDoc>( + Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite, + addFieldParams queryFields [], + fromData<'TDoc>, + conn) + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// Some with the first document, or None if not found + [] + let firstByFields<'TDoc> tableName howMatched fields conn = + Custom.single + $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1" + (addFieldParams fields []) + fromData<'TDoc> + conn + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The first document, or null if not found + let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, conn) = + Custom.Single( + $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", + addFieldParams fields [], + fromData<'TDoc>, + conn) + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// + /// Some with the first document ordered by the given fields, or None if not found + /// + [] + let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = + Custom.single + $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1" + (addFieldParams queryFields []) + fromData<'TDoc> + conn + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// The first document ordered by the given fields, or null if not found + let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( + tableName, howMatched, queryFields, orderFields, conn) = + Custom.Single( + $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1", + addFieldParams queryFields [], + fromData<'TDoc>, + conn) + + +/// Commands to retrieve documents as raw JSON +[] +module Json = + + /// Retrieve all JSON documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// The SqliteConnection to use to execute the query + /// All JSON documents from the given table + [] + let all tableName conn = + Custom.jsonArray (Query.find tableName) [] jsonFromData conn + + /// Retrieve all JSON documents in the given table ordered by the 
given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All JSON documents from the given table, ordered by the given fields + [] + let allOrdered tableName orderFields conn = + Custom.jsonArray (Query.find tableName + Query.orderBy orderFields SQLite) [] jsonFromData conn + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqliteConnection to use to execute the query + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) conn = + Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields conn = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields conn = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + jsonFromData + conn + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The first JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields conn = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// The first JSON document (in order) if found, an empty JSON document otherwise + [] + let firstByFieldsOrdered tableName howMatched queryFields orderFields conn = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + jsonFromData + conn + + /// Write all JSON documents in the given table to the given PipeWriter + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The SqliteConnection to use to execute the query + [] + let writeAll tableName writer conn = + Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData conn + + /// + /// Write all JSON all documents in the given table to the given PipeWriter, ordered by the given fields in + /// the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + [] + let writeAllOrdered tableName writer orderFields conn = + Custom.writeJsonArray (Query.find tableName + Query.orderBy orderFields SQLite) [] writer jsonFromData conn + + /// Write a JSON document to the given PipeWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// The ID of the document to retrieve + /// The SqliteConnection to use to execute the query + [] + let writeById<'TKey> tableName writer (docId: 'TKey) conn = 
backgroundTask { + let! json = Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData conn + let! _ = PipeWriter.writeString writer json + () + } + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + [] + let writeByFields tableName writer howMatched fields conn = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched fields) + (addFieldParams fields []) + writer + jsonFromData + conn + + /// + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + writer + jsonFromData + conn + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) 
+ /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + [] + let writeFirstByFields tableName writer howMatched fields conn = backgroundTask { + let! json = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn + let! _ = PipeWriter.writeString writer json + () + } + + /// + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The PipeWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + [] + let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn = backgroundTask { + let! json = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + jsonFromData + conn + let! 
_ = PipeWriter.writeString writer json + () + } + + +/// Commands to update documents +[] +module Update = + + /// Update (replace) an entire document by its ID + /// The table in which a document should be updated (may include schema) + /// The ID of the document to be updated (replaced) + /// The new document + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) (document: 'TDoc) conn = + Custom.nonQuery + (Query.statementWhere (Query.update tableName) (Query.whereById docId)) + [ idParam docId; jsonParam "@data" document ] + conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + /// The SqliteConnection to use to execute the query + [] + let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) conn = + byId tableName (idFunc document) document conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + /// The SqliteConnection to use to execute the query + let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, conn) = + byFunc tableName idFunc.Invoke document conn + + +/// Commands to patch (partially update) documents +[] +module Patch = + + /// Patch a document by its ID + /// The table in which a document should be patched (may include schema) + /// The ID of the document to patch + /// The partial document to patch the existing document + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) (patch: 'TPatch) conn = + Custom.nonQuery + (Query.byId (Query.patch tableName) docId) [ idParam docId; 
jsonParam "@data" patch ] conn + + /// + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// + /// The table in which documents should be patched (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The partial document to patch the existing document + /// The SqliteConnection to use to execute the query + [] + let byFields tableName howMatched fields (patch: 'TPatch) conn = + Custom.nonQuery + (Query.byFields (Query.patch tableName) howMatched fields) + (addFieldParams fields [ jsonParam "@data" patch ]) + conn + + +/// Commands to remove fields from documents +[] +module RemoveFields = + + /// Remove fields from a document by the document's ID + /// The table in which a document should be modified (may include schema) + /// The ID of the document to modify + /// One or more field names to remove from the document + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) fieldNames conn = + let nameParams = fieldNameParams "@name" fieldNames + Custom.nonQuery + (Query.byId (Query.removeFields tableName nameParams) docId) + (idParam docId |> Seq.singleton |> Seq.append nameParams) + conn + + /// Remove fields from documents via a comparison on JSON fields in the document + /// The table in which documents should be modified (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// One or more field names to remove from the matching documents + /// The SqliteConnection to use to execute the query + [] + let byFields tableName howMatched fields fieldNames conn = + let nameParams = fieldNameParams "@name" fieldNames + Custom.nonQuery + (Query.byFields (Query.removeFields tableName nameParams) howMatched fields) + (addFieldParams fields nameParams) + conn + + +/// Commands to delete documents +[] +module Delete = + + /// Delete a document by its ID + /// The 
table in which a document should be deleted (may include schema) + /// The ID of the document to delete + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) conn = + Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] conn + + /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// The table in which documents should be deleted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + [] + let byFields tableName howMatched fields conn = + Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) conn diff --git a/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj b/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj index b231560..ae872f4 100644 --- a/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj +++ b/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj @@ -4,6 +4,7 @@ enable enable latest + 1591 diff --git a/src/Tests.CSharp/CommonCSharpTests.cs b/src/Tests.CSharp/CommonCSharpTests.cs index 911b732..0ed9d67 100644 --- a/src/Tests.CSharp/CommonCSharpTests.cs +++ b/src/Tests.CSharp/CommonCSharpTests.cs @@ -1,3 +1,4 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using Microsoft.FSharp.Core; @@ -16,7 +17,7 @@ internal class TestSerializer : IDocumentSerializer } /// -/// C# Tests for common functionality in BitBadger.Documents +/// C# Tests for common functionality in BitBadger.Documents /// public static class CommonCSharpTests { @@ -417,7 +418,7 @@ public static class CommonCSharpTests }) ]) ]); - + /// /// Unit tests for the Configuration static class /// @@ -647,7 +648,115 @@ public static class CommonCSharpTests }) ]) ]); - + + private static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return 
reader.ReadToEnd(); + } + + /// Unit tests for the PipeWriter module + private static readonly Test PipeWriterTests = TestList("PipeWriterModule", + [ + TestList("WriteString", + [ + TestCase("succeeds when writer is open", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + try + { + var result = await PipeWriterModule.WriteString(writer, "abc"); + Expect.isTrue(result, "The write operation should have been successful"); + Expect.equal(StreamText(stream), "abc", "The string was not written correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when writer is completed", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + await writer.CompleteAsync(); + + var result = await PipeWriterModule.WriteString(writer, "abc"); + Expect.isFalse(result, "The write operation should have returned false"); + Expect.equal(StreamText(stream), "", "No text should have been written"); + }) + ]), + TestList("WriteStrings", + [ + TestCase("succeeds with no strings", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + try + { + await PipeWriterModule.WriteStrings(writer, []); + Expect.equal(StreamText(stream), "[]", "An empty sequence of strings was not written correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds with one strings", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + try + { + await PipeWriterModule.WriteStrings(writer, ["le-test"]); + Expect.equal(StreamText(stream), "[le-test]", "A sequence of one string was not written correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + 
TestCase("succeeds with many strings", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + try + { + await PipeWriterModule.WriteStrings(writer, ["z", "y", "x", "c", "b", "a"]); + Expect.equal(StreamText(stream), "[z,y,x,c,b,a]", + "A sequence of many strings was not written correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when the writer is completed early", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + await PipeWriterModule.WriteStrings(writer, Items()); + Expect.equal(StreamText(stream), "[a,b,c", "The writing should have stopped when the writer completed"); + return; + + IEnumerable Items() + { + yield return "a"; + yield return "b"; + yield return "c"; + writer.Complete(); + yield return "d"; + yield return "e"; + yield return "f"; + } + }) + ]) + ]); + + /// /// Unit tests /// @@ -660,6 +769,7 @@ public static class CommonCSharpTests ParameterNameTests, AutoIdTests, QueryTests, + PipeWriterTests, TestSequenced(ConfigurationTests) ]); } diff --git a/src/Tests.CSharp/PostgresCSharpExtensionTests.cs b/src/Tests.CSharp/PostgresCSharpExtensionTests.cs index 65b0743..d665106 100644 --- a/src/Tests.CSharp/PostgresCSharpExtensionTests.cs +++ b/src/Tests.CSharp/PostgresCSharpExtensionTests.cs @@ -1,3 +1,4 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using BitBadger.Documents.Postgres; @@ -9,11 +10,14 @@ using static CommonExtensionsAndTypesForNpgsqlFSharp; using static Runner; /// -/// C# tests for the extensions on the NpgsqlConnection type +/// C# tests for the extensions on the NpgsqlConnection type /// public class PostgresCSharpExtensionTests { - private static Task LoadDocs() => PostgresCSharpTests.LoadDocs(); + private static async Task LoadDocs(NpgsqlConnection conn) + { + foreach (var doc in 
JsonDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + } /// /// Create a connection to the throwaway database @@ -27,6 +31,83 @@ public class PostgresCSharpExtensionTests return conn; } + /// Set up a stream writer for a test + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + /// Get the text of the given stream + private static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return reader.ReadToEnd(); + } + + /// Verify a JSON array begins with "[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify the presence of a document by its ID + private static void VerifyDocById(string json, string docId) => + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + + /// Verify a single-document JSON array contains the document with the given ID + private static void VerifySingleById(string json, string docId) + { + VerifyBeginEnd(json); + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of any of the given document IDs in the given JSON + private static void VerifyAnyById(string json, IEnumerable docIds) + { + var theIds = docIds.ToList(); + if (theIds.Any(it => json.Contains($"{{\"Id\": \"{it}\""))) return; + var ids = string.Join(", ", theIds); + Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); + } + + /// Verify the JSON for `all` returning data + private static void VerifyAllData(string json) + { + VerifyBeginEnd(json); + IEnumerable ids = ["one", "two", "three", "four", "five"]; + foreach (var docId in ids) VerifyDocById(json, docId); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) => + 
Expect.equal(json, "[]", "There should be no documents returned"); + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) => + Expect.equal(json, "{}", "There should be no document returned"); + + /// Verify the JSON for an ordered query + private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null, + string? idFourth = null, string? idFifth = null) + { + var firstIdx = json.IndexOf($"{{\"Id\": \"{idFirst}\",", StringComparison.Ordinal); + var secondIdx = json.IndexOf($"{{\"Id\": \"{idSecond}\",", StringComparison.Ordinal); + VerifyBeginEnd(json); + Expect.isGreaterThan(secondIdx, firstIdx, $"`{idSecond}` should have been after `{idFirst}`"); + if (idThird is null) return; + + var thirdIdx = json.IndexOf($"{{\"Id\": \"{idThird}\",", StringComparison.Ordinal); + Expect.isGreaterThan(thirdIdx, secondIdx, $"`{idThird}` should have been after `{idSecond}`"); + if (idFourth is null) return; + + var fourthIdx = json.IndexOf($"{{\"Id\": \"{idFourth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fourthIdx, thirdIdx, $"`{idFourth}` should have been after `{idThird}`"); + if (idFifth is null) return; + + var fifthIdx = json.IndexOf($"{{\"Id\": \"{idFifth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fifthIdx, fourthIdx, $"`{idFifth}` should have been after `{idFourth}`"); + } + /// /// Integration tests for the SQLite extension methods /// @@ -39,7 +120,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.CustomList(Query.Find(PostgresDb.TableName), Parameters.None, Results.FromData); @@ -49,7 +130,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.CustomList( $"SELECT data FROM {PostgresDb.TableName} 
WHERE data @? @path::jsonpath", @@ -58,13 +139,82 @@ public class PostgresCSharpExtensionTests Expect.isEmpty(docs, "There should have been no documents returned"); }) ]), + TestList("CustomJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var docs = await conn.CustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, + Results.JsonFromData); + VerifyBeginEnd(docs); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var docs = await conn.CustomJsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], Results.JsonFromData); + VerifyEmpty(docs); + }) + ]), + TestList("WriteJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteCustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, + Results.JsonFromData); + var docs = StreamText(stream); + VerifyBeginEnd(docs); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteCustomJsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? 
@path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], writer, Results.JsonFromData); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), TestList("CustomSingle", [ TestCase("succeeds when a row is found", async () => { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", [Tuple.Create("@id", Sql.@string("one"))], Results.FromData); @@ -75,23 +225,50 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", [Tuple.Create("@id", Sql.@string("eighty"))], Results.FromData); Expect.isNull(doc, "There should not have been a document returned"); }) ]), + TestList("CustomJsonSingle", + [ + TestCase("succeeds when a row is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var doc = await conn.CustomJsonSingle( + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("one"))], Results.JsonFromData); + Expect.stringStarts(doc, "{", "The document should have started with an open brace"); + Expect.stringContains(doc, "\"Id\": \"one\"", "An incorrect document was returned"); + Expect.stringEnds(doc, "}", "The document should have ended with a closing brace"); + }), + TestCase("succeeds when a row is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var doc = await conn.CustomJsonSingle( + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("eighty"))], 
Results.JsonFromData); + Expect.equal(doc, "{}", "There should not have been a document returned"); + }) + ]), TestList("CustomNonQuery", [ TestCase("succeeds when operating on data", async () => { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {PostgresDb.TableName}", Parameters.None); - + var remaining = await conn.CountAll(PostgresDb.TableName); Expect.equal(remaining, 0, "There should be no documents remaining in the table"); }), @@ -99,11 +276,11 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))]); - + var remaining = await conn.CountAll(PostgresDb.TableName); Expect.equal(remaining, 5, "There should be 5 documents remaining in the table"); }) @@ -183,7 +360,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should be no documents in the table"); - + await conn.Insert(PostgresDb.TableName, new JsonDocument { Id = "turkey", Sub = new() { Foo = "gobble", Bar = "gobble" } }); var after = await conn.FindAll(PostgresDb.TableName); @@ -213,7 +390,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should be no documents in the table"); - + await conn.Save(PostgresDb.TableName, new JsonDocument { Id = "test", Sub = new() { Foo = "a", Bar = "b" } }); var after = await conn.FindAll(PostgresDb.TableName); @@ -229,7 +406,7 @@ public class PostgresCSharpExtensionTests var before = await conn.FindById(PostgresDb.TableName, "test"); Expect.isNotNull(before, 
"There should have been a document returned"); Expect.equal(before.Id, "test", "The document is not correct"); - + await conn.Save(PostgresDb.TableName, new JsonDocument { Id = "test", Sub = new() { Foo = "c", Bar = "d" } }); var after = await conn.FindById(PostgresDb.TableName, "test"); @@ -241,8 +418,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + var theCount = await conn.CountAll(PostgresDb.TableName); Expect.equal(theCount, 5, "There should have been 5 matching documents"); }), @@ -250,7 +427,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]); @@ -260,7 +437,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByContains(PostgresDb.TableName, new { Value = "purple" }); Expect.equal(theCount, 2, "There should have been 2 matching documents"); @@ -269,7 +446,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByJsonPath(PostgresDb.TableName, "$.NumValue ? 
(@ > 5)"); Expect.equal(theCount, 3, "There should have been 3 matching documents"); @@ -280,7 +457,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(PostgresDb.TableName, "three"); Expect.isTrue(exists, "There should have been an existing document"); @@ -289,7 +466,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(PostgresDb.TableName, "seven"); Expect.isFalse(exists, "There should not have been an existing document"); @@ -301,7 +478,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Exists("Sub")]); Expect.isTrue(exists, "There should have been existing documents"); @@ -310,7 +487,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "six")]); @@ -323,7 +500,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByContains(PostgresDb.TableName, new { NumValue = 10 }); Expect.isTrue(exists, "There should have been existing documents"); @@ -332,7 +509,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByContains(PostgresDb.TableName, new { Nothing = 
"none" }); Expect.isFalse(exists, "There should not have been any existing documents"); @@ -344,7 +521,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByJsonPath(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")"); Expect.isTrue(exists, "There should have been existing documents"); @@ -353,7 +530,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 1000)"); Expect.isFalse(exists, "There should not have been any existing documents"); @@ -387,7 +564,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]); @@ -399,7 +576,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]); @@ -411,7 +588,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]); Expect.hasLength(results, 5, "There should have been 5 documents returned"); @@ -425,7 +602,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(PostgresDb.TableName, "two"); Expect.isNotNull(doc, "There should have been a 
document returned"); @@ -435,7 +612,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(PostgresDb.TableName, "three hundred eighty-seven"); Expect.isNull(doc, "There should not have been a document returned"); @@ -447,7 +624,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]); @@ -457,7 +634,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "mauve")]); @@ -470,7 +647,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id")]); @@ -482,7 +659,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); @@ -497,7 +674,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); @@ -507,7 +684,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using 
var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContains(PostgresDb.TableName, new { Value = "mauve" }); Expect.isEmpty(docs, "There should have been no documents returned"); @@ -520,7 +697,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Sub.Bar")]); @@ -532,7 +709,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Sub.Bar DESC")]); @@ -547,7 +724,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); Expect.equal(docs.Count, 3, "There should have been 3 documents returned"); @@ -556,7 +733,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)"); Expect.isEmpty(docs, "There should have been no documents returned"); @@ -569,7 +746,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)", [Field.Named("n:NumValue")]); @@ -581,7 +758,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", [Field.Named("n:NumValue DESC")]); @@ -596,7 +773,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]); @@ -607,7 +784,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]); @@ -618,7 +795,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "absent")]); @@ -631,7 +808,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id")]); @@ -642,7 +819,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); @@ -656,7 +833,7 @@ public class PostgresCSharpExtensionTests { await using var db = 
PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContains(PostgresDb.TableName, new { Value = "another" }); Expect.isNotNull(doc, "There should have been a document returned"); @@ -666,7 +843,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); @@ -677,7 +854,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContains(PostgresDb.TableName, new { Value = "absent" }); Expect.isNull(doc, "There should not have been a document returned"); @@ -689,7 +866,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); @@ -700,7 +877,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); @@ -714,7 +891,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPath(PostgresDb.TableName, "$.Value ? 
(@ == \"FIRST!\")"); @@ -725,7 +902,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPath(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")"); @@ -736,7 +913,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPath(PostgresDb.TableName, "$.Id ? (@ == \"nope\")"); Expect.isNull(doc, "There should not have been a document returned"); @@ -748,7 +925,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPathOrdered(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")", [Field.Named("Sub.Bar")]); @@ -759,7 +936,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPathOrdered(PostgresDb.TableName, "$.Sub.Foo ? 
(@ == \"green\")", [Field.Named("Sub.Bar DESC")]); @@ -767,13 +944,1110 @@ public class PostgresCSharpExtensionTests Expect.equal("four", doc.Id, "An incorrect document was returned"); }) ]), + TestList("JsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAllData(await conn.JsonAll(PostgresDb.TableName)); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + VerifyEmpty(await conn.JsonAll(PostgresDb.TableName)); + }) + ]), + TestList("JsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder(await conn.JsonAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]), + "one", "three", "two", "four", "five"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder(await conn.JsonAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]), + "five", "four", "two", "three", "one"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder(await conn.JsonAllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]), + "two", "three", "one", "four", "five"); + }) + ]), + TestList("JsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var json = await conn.JsonById(PostgresDb.TableName, "two"); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect 
document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonById(PostgresDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("JsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifySingleById( + await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]), + "four"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifySingleById( + await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + + var json = await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + }), + TestCase("succeeds for InArray when no matching 
documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + VerifyEmpty(await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])])); + }) + ]), + TestList("JsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]), + "five", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), + "four", "five"); + }) + ]), + TestList("JsonByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var json = await conn.JsonByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByContains(PostgresDb.TableName, new { Value = "mauve" })); + }) + ]), + TestList("JsonByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = 
PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]), + "two", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]), + "four", "two"); + }) + ]), + TestList("JsonByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var json = await conn.JsonByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)")); + }) + ]), + TestList("JsonByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)", + [Field.Named("n:NumValue")]), + "one", "three", "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue DESC")]), + "two", "three", "one"); + }) + ]), + TestList("JsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "another")]), + "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAnyById( + await conn.JsonFirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")]), + ["five", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("JsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]), + "five"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await 
conn.JsonFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), + "four"); + }) + ]), + TestList("JsonFirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById(await conn.JsonFirstByContains(PostgresDb.TableName, new { Value = "another" }), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAnyById(await conn.JsonFirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByContains(PostgresDb.TableName, new { Value = "absent" })); + }) + ]), + TestList("JsonFirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value DESC")]), + "four"); + }) + ]), + TestList("JsonFirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + 
VerifyDocById(await conn.JsonFirstByJsonPath(PostgresDb.TableName, """$.Value ? (@ == "FIRST!")"""), + "one"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAnyById(await conn.JsonFirstByJsonPath(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")"""), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByJsonPath(PostgresDb.TableName, """$.Id ? (@ == "nope")""")); + }) + ]), + TestList("JsonFirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? 
(@ == "green")""", + [Field.Named("Sub.Bar DESC")]), + "four"); + }) + ]), + TestList("WriteJsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAll(PostgresDb.TableName, writer); + VerifyAllData(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAll(PostgresDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering alphabetically", 
async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonById(PostgresDb.TableName, writer, "two"); + var json = StreamText(stream); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonById(PostgresDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]); + VerifySingleById(StreamText(stream), "four"); 
+ } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + VerifySingleById(StreamText(stream), "three"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await 
conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyExpectedOrder(StreamText(stream), "five", "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + var json = StreamText(stream); + 
VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Value = "mauve" }); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]); + VerifyExpectedOrder(StreamText(stream), "two", "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "two"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + var writer 
= WriteStream(stream); + try + { + await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)"); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)"); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? 
(@ < 15)", + [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")]); + VerifyAnyById(StreamText(stream), ["five", "four"]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await 
conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyDocById(StreamText(stream), "five"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = 
WriteStream(stream); + try + { + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? 
(@ == "FIRST!")"""); + VerifyDocById(StreamText(stream), "one"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")"""); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? 
(@ == "green")""", [Field.Named("Sub.Bar DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), TestList("UpdateById", [ TestCase("succeeds when a document is updated", async () => { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.UpdateById(PostgresDb.TableName, "one", new JsonDocument { Id = "one", Sub = new() { Foo = "blue", Bar = "red" } }); @@ -792,7 +2066,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.UpdateById(PostgresDb.TableName, "test", new JsonDocument { Id = "x", Sub = new() { Foo = "blue", Bar = "red" } }); @@ -804,7 +2078,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.UpdateByFunc(PostgresDb.TableName, doc => doc.Id, new JsonDocument { Id = "one", Value = "le un", NumValue = 1 }); @@ -821,7 +2095,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.UpdateByFunc(PostgresDb.TableName, doc => doc.Id, new JsonDocument { Id = "one", Value = "le un", NumValue = 1 }); @@ -833,7 +2107,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchById(PostgresDb.TableName, "one", new { NumValue = 44 }); var after = await conn.FindById(PostgresDb.TableName, "one"); @@ -846,7 +2120,7 @@ public class 
PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchById(PostgresDb.TableName, "test", new { Foo = "green" }); }) @@ -857,7 +2131,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], new { NumValue = 77 }); @@ -871,7 +2145,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "burgundy")], new { Foo = "green" }); @@ -883,7 +2157,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByContains(PostgresDb.TableName, new { Value = "purple" }, new { NumValue = 77 }); var after = await conn.CountByContains(PostgresDb.TableName, new { NumValue = 77 }); @@ -895,7 +2169,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchByContains(PostgresDb.TableName, new { Value = "burgundy" }, new { Foo = "green" }); }) @@ -906,7 +2180,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await 
conn.PatchByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 10)", new { NumValue = 1000 }); var after = await conn.CountByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 999)"); @@ -918,7 +2192,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)", new { Foo = "green" }); }) @@ -929,7 +2203,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["Sub", "Value"]); var updated = await Find.ById(PostgresDb.TableName, "two"); @@ -941,7 +2215,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["Sub"]); var updated = await Find.ById(PostgresDb.TableName, "two"); @@ -953,8 +2227,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["AFieldThatIsNotThere"]); }), @@ -962,7 +2236,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["Value"]); }) @@ -973,7 +2247,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await 
conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "17")], ["Sub", "Value"]); @@ -986,7 +2260,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "17")], ["Sub"]); @@ -999,8 +2273,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "17")], ["Nothing"]); @@ -1009,7 +2283,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.NotEqual("Abracadabra", "apple")], ["Value"]); @@ -1021,7 +2295,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByContains(PostgresDb.TableName, new { NumValue = 17 }, ["Sub", "Value"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1033,7 +2307,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByContains(PostgresDb.TableName, new { NumValue = 17 }, ["Sub"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1045,8 +2319,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is 
the test await conn.RemoveFieldsByContains(PostgresDb.TableName, new { NumValue = 17 }, ["Nothing"]); }), @@ -1054,7 +2328,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsByContains(PostgresDb.TableName, new { Abracadabra = "apple" }, ["Value"]); }) @@ -1065,7 +2339,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ == 17)", ["Sub", "Value"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1077,7 +2351,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ == 17)", ["Sub"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1089,8 +2363,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ == 17)", ["Nothing"]); }), @@ -1098,7 +2372,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.Abracadabra ? 
(@ == \"apple\")", ["Value"]); }) @@ -1109,7 +2383,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(PostgresDb.TableName, "four"); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1119,7 +2393,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(PostgresDb.TableName, "thirty"); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1132,7 +2406,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(PostgresDb.TableName, FieldMatch.Any, [Field.NotEqual("Value", "purple")]); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1142,7 +2416,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "crimson")]); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1155,7 +2429,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByContains(PostgresDb.TableName, new { Value = "purple" }); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1165,7 +2439,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByContains(PostgresDb.TableName, new { Value = "crimson" }); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1178,7 +2452,7 
@@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByJsonPath(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")"); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1188,7 +2462,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 100)"); var remaining = await conn.CountAll(PostgresDb.TableName); diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index 3a95a2c..da3a991 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -1,3 +1,4 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using BitBadger.Documents.Postgres; @@ -9,7 +10,7 @@ using static CommonExtensionsAndTypesForNpgsqlFSharp; using static Runner; /// -/// C# tests for the PostgreSQL implementation of BitBadger.Documents +/// C# tests for the PostgreSQL implementation of BitBadger.Documents /// public static class PostgresCSharpTests { @@ -319,15 +320,70 @@ public static class PostgresCSharpTests "By-JSON Path query not correct"); }) ]); - + /// /// Add the test documents to the database /// - internal static async Task LoadDocs() + private static async Task LoadDocs() { foreach (var doc in JsonDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); } + /// Set up a stream writer for a test + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + /// Get the text of the given stream + private static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return reader.ReadToEnd(); + } + + /// Verify a JSON array begins with 
"[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify the presence of a document by its ID + private static void VerifyDocById(string json, string docId) => + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + + /// Verify the presence of a document by its ID + private static void VerifySingleById(string json, string docId) + { + VerifyBeginEnd(json); + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of any of the given document IDs in the given JSON + private static void VerifyAnyById(string json, IEnumerable docIds) + { + var theIds = docIds.ToList(); + if (theIds.Any(it => json.Contains($"{{\"Id\": \"{it}\""))) return; + var ids = string.Join(", ", theIds); + Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); + } + + /// Verify the JSON for `all` returning data + private static void VerifyAllData(string json) + { + VerifyBeginEnd(json); + IEnumerable ids = ["one", "two", "three", "four", "five"]; + foreach (var docId in ids) VerifyDocById(json, docId); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) => + Expect.equal(json, "[]", "There should be no documents returned"); + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) => + Expect.equal(json, "{}", "There should be no document returned"); + /// /// Integration tests for the Configuration module of the PostgreSQL library /// @@ -389,6 +445,73 @@ public static class PostgresCSharpTests Expect.isEmpty(docs, "There should have been no documents returned"); }) ]), + TestList("JsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var docs = 
await Custom.JsonArray(Query.Find(PostgresDb.TableName), Parameters.None, + Results.JsonFromData); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var docs = await Custom.JsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], Results.JsonFromData); + Expect.equal(docs, "[]", "There should have been no documents returned"); + }) + ]), + TestList("WriteJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, + Results.JsonFromData); + var docs = StreamText(stream); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? 
(@ > 100)"))], writer, Results.JsonFromData); + Expect.equal(StreamText(stream), "[]", "There should have been no documents returned"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), TestList("Single", [ TestCase("succeeds when a row is found", async () => @@ -411,6 +534,29 @@ public static class PostgresCSharpTests Expect.isNull(doc, "There should not have been a document returned"); }) ]), + TestList("JsonSingle", + [ + TestCase("succeeds when a row is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var doc = await Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("one"))], Results.JsonFromData); + Expect.stringStarts(doc, "{", "The document should have started with an open brace"); + Expect.stringContains(doc, "\"Id\": \"one\"", "An incorrect document was returned"); + Expect.stringEnds(doc, "}", "The document should have ended with a closing brace"); + }), + TestCase("succeeds when a row is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var doc = await Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("eighty"))], Results.JsonFromData); + Expect.equal(doc, "{}", "There should not have been a document returned"); + }) + ]), TestList("NonQuery", [ TestCase("succeeds when operating on data", async () => @@ -1198,6 +1344,1053 @@ public static class PostgresCSharpTests ]) ]); + /// Verify the JSON for an ordered query + private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null, + string? idFourth = null, string? 
idFifth = null) + { + var firstIdx = json.IndexOf($"{{\"Id\": \"{idFirst}\",", StringComparison.Ordinal); + var secondIdx = json.IndexOf($"{{\"Id\": \"{idSecond}\",", StringComparison.Ordinal); + VerifyBeginEnd(json); + Expect.isGreaterThan(secondIdx, firstIdx, $"`{idSecond}` should have been after `{idFirst}`"); + if (idThird is null) return; + + var thirdIdx = json.IndexOf($"{{\"Id\": \"{idThird}\",", StringComparison.Ordinal); + Expect.isGreaterThan(thirdIdx, secondIdx, $"`{idThird}` should have been after `{idSecond}`"); + if (idFourth is null) return; + + var fourthIdx = json.IndexOf($"{{\"Id\": \"{idFourth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fourthIdx, thirdIdx, $"`{idFourth}` should have been after `{idThird}`"); + if (idFifth is null) return; + + var fifthIdx = json.IndexOf($"{{\"Id\": \"{idFifth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fifthIdx, fourthIdx, $"`{idFifth}` should have been after `{idFourth}`"); + } + + /// + /// Integration tests for the Json module of the PostgreSQL library + /// + private static readonly Test JsonTests = TestList("Json", + [ + TestList("All", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAllData(await Json.All(PostgresDb.TableName)); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + VerifyEmpty(await Json.All(PostgresDb.TableName)); + }) + ]), + TestList("AllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder(await Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]), + "one", "three", "two", "four", "five"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder(await 
Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]), + "five", "four", "two", "three", "one"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder(await Json.AllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]), + "two", "three", "one", "four", "five"); + }) + ]), + TestList("ById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var json = await Json.ById(PostgresDb.TableName, "two"); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await Json.ById(PostgresDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("ByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifySingleById( + await Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]), + "four"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifySingleById( + await Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(await Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using 
var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + + var json = await Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + VerifyEmpty(await Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])])); + }) + ]), + TestList("ByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + await Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], + [Field.Named("Id")]), + "five", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + await Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], + [Field.Named("Id DESC")]), + "four", "five"); + }) + ]), + TestList("ByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var json = await Json.ByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(await 
Json.ByContains(PostgresDb.TableName, new { Value = "mauve" })); + }) + ]), + TestList("ByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + await Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]), + "two", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + await Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]), + "four", "two"); + }) + ]), + TestList("ByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var json = await Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(await Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)")); + }) + ]), + TestList("ByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + await Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]), + "one", "three", "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + await Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)", + [Field.Named("n:NumValue DESC")]), + "two", "three", "one"); + }) + ]), + TestList("FirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAnyById( + await Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]), + ["five", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("FirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]), + "five"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), + "four"); + }) + ]), + TestList("FirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById(await Json.FirstByContains(PostgresDb.TableName, new { Value = "another" }), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAnyById(await 
Json.FirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await Json.FirstByContains(PostgresDb.TableName, new { Value = "absent" })); + }) + ]), + TestList("FirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value DESC")]), + "four"); + }) + ]), + TestList("FirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById(await Json.FirstByJsonPath(PostgresDb.TableName, """$.Value ? (@ == "FIRST!")"""), "one"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAnyById(await Json.FirstByJsonPath(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")"""), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await Json.FirstByJsonPath(PostgresDb.TableName, """$.Id ? (@ == "nope")""")); + }) + ]), + TestList("FirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? 
(@ == "green")""", + [Field.Named("Sub.Bar")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + await Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar DESC")]), + "four"); + }) + ]), + TestList("WriteAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(PostgresDb.TableName, writer); + VerifyAllData(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(PostgresDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one"); + } + finally + 
{ + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteById(PostgresDb.TableName, writer, "two"); + var json = StreamText(stream); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteById(PostgresDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]); + VerifySingleById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when 
documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + VerifySingleById(StreamText(stream), "three"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await 
Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyExpectedOrder(StreamText(stream), "five", "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await 
Json.WriteByContains(PostgresDb.TableName, writer, new { Value = "mauve" }); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]); + VerifyExpectedOrder(StreamText(stream), "two", "four"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "two"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? 
(@ < 15)"); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)"); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? 
(@ < 15)", + [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")]); + VerifyAnyById(StreamText(stream), ["five", "four"]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyDocById(StreamText(stream), "five"); + } 
+ finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + 
await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); + VerifyDocById(StreamText(stream), "one"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? 
(@ == "green")"""); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? 
(@ == "green")""", + [Field.Named("Sub.Bar DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]) + ]); + /// /// Integration tests for the Update module of the PostgreSQL library /// @@ -1639,6 +2832,7 @@ public static class PostgresCSharpTests CountTests, ExistsTests, FindTests, + JsonTests, UpdateTests, PatchTests, RemoveFieldsTests, diff --git a/src/Tests.CSharp/SqliteCSharpExtensionTests.cs b/src/Tests.CSharp/SqliteCSharpExtensionTests.cs index 5d23375..e47aedc 100644 --- a/src/Tests.CSharp/SqliteCSharpExtensionTests.cs +++ b/src/Tests.CSharp/SqliteCSharpExtensionTests.cs @@ -1,17 +1,58 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using BitBadger.Documents.Sqlite; +using Microsoft.Data.Sqlite; namespace BitBadger.Documents.Tests.CSharp; using static Runner; /// -/// C# tests for the extensions on the SqliteConnection class +/// C# tests for the extensions on the SqliteConnection class /// public static class SqliteCSharpExtensionTests { - private static Task LoadDocs() => SqliteCSharpTests.LoadDocs(); + private static async Task LoadDocs(SqliteConnection conn) + { + foreach (var doc in JsonDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + } + + /// Verify a JSON array begins with "[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) => + Expect.equal(json, "[]", "There should be no documents returned"); + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) => + Expect.equal(json, "{}", "There should be no document returned"); + + /// Set up a stream writer for a test + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new 
StreamPipeWriterOptions(leaveOpen: true)); + + /// Get the text of the given stream + private static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return reader.ReadToEnd(); + } + + /// Verify the presence of any of the given documents in the given JSON + private static void VerifyAny(string json, IEnumerable docs) + { + var theDocs = docs.ToList(); + if (theDocs.Any(json.Contains)) return; + var anyDocs = string.Join(" | ", theDocs); + Expect.isTrue(false, $"Could not find any of |{anyDocs}| in {json}"); + } /// /// Integration tests for the SQLite extension methods @@ -19,13 +60,111 @@ public static class SqliteCSharpExtensionTests [Tests] public static readonly Test Integration = TestList("Sqlite.C#.Extensions", [ + TestList("CustomList", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var docs = await conn.CustomList(Query.Find(SqliteDb.TableName), Parameters.None, + Results.FromData); + Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var docs = await conn.CustomList( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], + Results.FromData); + Expect.isEmpty(docs, "There should have been no documents returned"); + }) + ]), + TestList("CustomJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var json = await conn.CustomJsonArray(Query.Find(SqliteDb.TableName), [], Results.JsonFromData); + VerifyBeginEnd(json); + 
Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + VerifyEmpty(await conn.CustomJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], Results.JsonFromData)); + }) + ]), + TestList("WriteCustomJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteCustomJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two, "Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var 
conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteCustomJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), TestList("CustomSingle", [ TestCase("succeeds when a row is found", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", [Parameters.Id("one")], Results.FromData); @@ -36,35 +175,35 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", [Parameters.Id("eighty")], Results.FromData); Expect.isNull(doc, "There should not have been a document returned"); }) ]), - TestList("CustomList", + TestList("CustomJsonSingle", [ - TestCase("succeeds when data is found", async () => + TestCase("succeeds when a row is found", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); - var docs = await conn.CustomList(Query.Find(SqliteDb.TableName), Parameters.None, - Results.FromData); - Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + var json = await conn.CustomJsonSingle( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "one")], Results.JsonFromData); + Expect.equal(json, JsonDocument.One, "The JSON document is incorrect"); }), - 
TestCase("succeeds when data is not found", async () => + TestCase("succeeds when a row is not found", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); - var docs = await conn.CustomList( - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], - Results.FromData); - Expect.isEmpty(docs, "There should have been no documents returned"); + VerifyNoDoc(await conn.CustomJsonSingle( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "eighty")], Results.JsonFromData)); }) ]), TestList("CustomNonQuery", @@ -73,7 +212,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {SqliteDb.TableName}", Parameters.None); @@ -84,7 +223,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)]); @@ -210,7 +349,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountAll(SqliteDb.TableName); Expect.equal(theCount, 5L, "There should have been 5 matching documents"); @@ -219,7 +358,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", 
"purple")]); @@ -231,7 +370,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(SqliteDb.TableName, "three"); Expect.isTrue(exists, "There should have been an existing document"); @@ -240,7 +379,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(SqliteDb.TableName, "seven"); Expect.isFalse(exists, "There should not have been an existing document"); @@ -252,7 +391,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.GreaterOrEqual("NumValue", 10)]); @@ -262,7 +401,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Nothing", "none")]); @@ -297,7 +436,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue")]); Expect.hasLength(results, 5, "There should have been 5 documents returned"); @@ -308,7 +447,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var results = await 
conn.FindAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue DESC")]); @@ -320,7 +459,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(SqliteDb.TableName, [Field.Named("Id DESC")]); Expect.hasLength(results, 5, "There should have been 5 documents returned"); @@ -334,7 +473,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(SqliteDb.TableName, "two"); Expect.isNotNull(doc, "There should have been a document returned"); @@ -344,7 +483,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(SqliteDb.TableName, "eighty-seven"); Expect.isNull(doc, "There should not have been a document returned"); @@ -356,7 +495,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)]); @@ -366,7 +505,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "mauve")]); @@ -379,7 +518,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); 
var docs = await conn.FindByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], [Field.Named("Id")]); @@ -390,7 +529,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); @@ -404,7 +543,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]); @@ -415,7 +554,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Sub.Foo", "green")]); @@ -426,7 +565,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "absent")]); @@ -439,7 +578,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); @@ -450,7 +589,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await 
LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); @@ -458,13 +597,680 @@ public static class SqliteCSharpExtensionTests Expect.equal("four", doc!.Id, "An incorrect document was returned"); }) ]), + TestList("JsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + var json = await conn.JsonAll(SqliteDb.TableName); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + VerifyEmpty(await conn.JsonAll(SqliteDb.TableName)); + }) + ]), + TestList("JsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue")]), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = await 
SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue DESC")]), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonAllOrdered(SqliteDb.TableName, [Field.Named("Id DESC")]), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }) + ]), + TestList("JsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonById(SqliteDb.TableName, "two"), JsonDocument.Two, + "The incorrect document was returned"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonById(SqliteDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("JsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var json = await conn.JsonByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)]); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + 
}), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFields(SqliteDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + $"[{JsonDocument.Three}]", "There should have been one document returned"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Greater("NumValue", 100)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + + var json = await conn.JsonByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + VerifyEmpty(await conn.JsonByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])])); + }) + ]), + TestList("JsonByFieldsOrdered", + [ + 
TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id")]), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id DESC")]), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]), + $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", "Documents not ordered correctly"); + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]), + $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", "Documents not ordered correctly"); + }) + ]), + TestList("JsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await 
LoadDocs(conn); + Expect.equal( + await conn.JsonFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var json = await conn.JsonFirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("JsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]), JsonDocument.Two, + "An incorrect document was returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]), JsonDocument.Four, + "An incorrect document was returned"); + }) + ]), + TestList("WriteJsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); 
+ await using var conn = Sqlite.Configuration.DbConn(); + + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAll(SqliteDb.TableName, writer); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", + "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAll(SqliteDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering numerically 
descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonById(SqliteDb.TableName, writer, "two"); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = 
WriteStream(stream); + try + { + await conn.WriteJsonById(SqliteDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", + "There should have been one document returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 100)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await 
writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + 
[Field.Greater("NumValue", 15)], [Field.Named("Id")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", + 
"Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + var json = StreamText(stream); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteJsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var 
conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); + Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), TestList("UpdateById", [ TestCase("succeeds when a document is updated", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var testDoc = new JsonDocument { Id = "one", Sub = new() { Foo = "blue", Bar = "red" } }; await conn.UpdateById(SqliteDb.TableName, "one", testDoc); @@ -493,7 +1299,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.UpdateByFunc(SqliteDb.TableName, doc => doc.Id, new JsonDocument { Id = "one", Value = "le un", NumValue = 1 }); @@ -522,7 +1328,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await 
conn.PatchById(SqliteDb.TableName, "one", new { NumValue = 44 }); var after = await conn.FindById(SqliteDb.TableName, "one"); @@ -547,7 +1353,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], new { NumValue = 77 }); @@ -572,7 +1378,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsById(SqliteDb.TableName, "two", ["Sub", "Value"]); var updated = await Find.ById(SqliteDb.TableName, "two"); @@ -584,8 +1390,8 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsById(SqliteDb.TableName, "two", ["AFieldThatIsNotThere"]); }), @@ -593,7 +1399,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - + // This not raising an exception is the test await conn.RemoveFieldsById(SqliteDb.TableName, "two", ["Value"]); }) @@ -604,7 +1410,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", 17)], ["Sub"]); @@ -616,8 +1422,8 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); - + await LoadDocs(conn); + // This 
not raising an exception is the test await conn.RemoveFieldsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", 17)], ["Nothing"]); @@ -626,7 +1432,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - + // This not raising an exception is the test await conn.RemoveFieldsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.NotEqual("Abracadabra", "apple")], ["Value"]); @@ -638,7 +1444,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(SqliteDb.TableName, "four"); var remaining = await conn.CountAll(SqliteDb.TableName); @@ -648,7 +1454,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(SqliteDb.TableName, "thirty"); var remaining = await conn.CountAll(SqliteDb.TableName); @@ -661,7 +1467,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(SqliteDb.TableName, FieldMatch.Any, [Field.NotEqual("Value", "purple")]); var remaining = await conn.CountAll(SqliteDb.TableName); @@ -671,7 +1477,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "crimson")]); var remaining = await conn.CountAll(SqliteDb.TableName); diff --git a/src/Tests.CSharp/SqliteCSharpTests.cs b/src/Tests.CSharp/SqliteCSharpTests.cs index 14aa617..c1e10d8 
100644 --- a/src/Tests.CSharp/SqliteCSharpTests.cs +++ b/src/Tests.CSharp/SqliteCSharpTests.cs @@ -1,7 +1,9 @@ -using Expecto.CSharp; +using System.IO.Pipelines; +using Expecto.CSharp; using Expecto; using Microsoft.FSharp.Core; using BitBadger.Documents.Sqlite; +using Microsoft.Data.Sqlite; namespace BitBadger.Documents.Tests.CSharp; @@ -147,7 +149,7 @@ public static class SqliteCSharpTests /// /// Add the test documents to the database /// - internal static async Task LoadDocs() + private static async Task LoadDocs() { foreach (var doc in JsonDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); } @@ -169,11 +171,139 @@ public static class SqliteCSharpTests } }); + /// Verify a JSON array begins with "[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) => + Expect.equal(json, "[]", "There should be no documents returned"); + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) => + Expect.equal(json, "{}", "There should be no document returned"); + + /// Set up a stream writer for a test + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + /// Get the text of the given stream + private static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return reader.ReadToEnd(); + } + + /// Verify the presence of any of the given documents in the given JSON + private static void VerifyAny(string json, IEnumerable docs) + { + var theDocs = docs.ToList(); + if (theDocs.Any(json.Contains)) return; + var anyDocs = string.Join(" | ", theDocs); + Expect.isTrue(false, $"Could not find any of |{anyDocs}| in {json}"); + } + /// /// Integration 
tests for the Custom module of the SQLite library /// private static readonly Test CustomTests = TestList("Custom", [ + TestList("List", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + var docs = await Custom.List(Query.Find(SqliteDb.TableName), Parameters.None, + Results.FromData); + Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + var docs = await Custom.List( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], + Results.FromData); + Expect.isEmpty(docs, "There should have been no documents returned"); + }) + ]), + TestList("JsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + var json = await Custom.JsonArray(Query.Find(SqliteDb.TableName), [], Results.JsonFromData); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + VerifyEmpty(await Custom.JsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], Results.JsonFromData)); + }) + ]), + TestList("WriteJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + 
await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two, "Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), TestList("Single", [ TestCase("succeeds when a row is found", async () => @@ -196,26 +326,24 @@ public static class SqliteCSharpTests Expect.isNull(doc, "There should not have been a document returned"); }) ]), - TestList("List", + TestList("JsonSingle", [ - TestCase("succeeds when data is found", async () => + TestCase("succeeds when a row is found", async () => { await using var db = await SqliteDb.BuildDb(); await LoadDocs(); - var docs = await Custom.List(Query.Find(SqliteDb.TableName), Parameters.None, - Results.FromData); - Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + var json = await Custom.JsonSingle($"SELECT data 
FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "one")], Results.JsonFromData); + Expect.equal(json, JsonDocument.One, "The JSON document is incorrect"); }), - TestCase("succeeds when data is not found", async () => + TestCase("succeeds when a row is not found", async () => { await using var db = await SqliteDb.BuildDb(); await LoadDocs(); - var docs = await Custom.List( - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], - Results.FromData); - Expect.isEmpty(docs, "There should have been no documents returned"); + VerifyNoDoc(await Custom.JsonSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "eighty")], Results.JsonFromData)); }) ]), TestList("NonQuery", @@ -757,6 +885,635 @@ public static class SqliteCSharpTests ]) ]); + /// Integration tests for the Json module of the SQLite library + private static readonly Test JsonTests = TestList("Json", + [ + TestList("All", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + var json = await Json.All(SqliteDb.TableName); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + VerifyEmpty(await Json.All(SqliteDb.TableName)); + }) + ]), + TestList("AllOrdered", + [ + 
TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal(await Json.AllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue")]), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal(await Json.AllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue DESC")]), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal(await Json.AllOrdered(SqliteDb.TableName, [Field.Named("Id DESC")]), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }) + ]), + TestList("ById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal(await Json.ById(SqliteDb.TableName, "two"), JsonDocument.Two, + "The incorrect document was returned"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await Json.ById(SqliteDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("ByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + var json = await Json.ByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)]); + VerifyBeginEnd(json); + Expect.stringContains(json, 
JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFields(SqliteDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + $"[{JsonDocument.Three}]", "There should have been one document returned"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(await Json.ByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 100)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + + var json = await Json.ByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + VerifyEmpty(await Json.ByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])])); + }) + ]), + TestList("ByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var 
db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id")]), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id DESC")]), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, [Field.LessOrEqual("NumValue", 10)], + [Field.Named("Value")]), + $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", "Documents not ordered correctly"); + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, [Field.LessOrEqual("NumValue", 10)], + [Field.Named("i:Value")]), + $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", "Documents not ordered correctly"); + }) + ]), + TestList("FirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.FirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + var json = await Json.FirstByFields(SqliteDb.TableName, 
FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await Json.FirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("FirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.FirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]), JsonDocument.Two, + "An incorrect document was returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.FirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]), JsonDocument.Four, + "An incorrect document was returned"); + }) + ]), + TestList("WriteAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(SqliteDb.TableName, writer); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", + "The second document was 
not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(SqliteDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); + 
Expect.equal(StreamText(stream), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteById(SqliteDb.TableName, writer, "two"); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteById(SqliteDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = 
WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", + "There should have been one document returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 100)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await 
Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await 
SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + TestList("WriteFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + var json = StreamText(stream); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]), + 
TestList("WriteFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); + Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } + }) + ]) + ]); + /// /// Integration tests for the Update module of the SQLite library /// @@ -1006,6 +1763,7 @@ public static class SqliteCSharpTests CountTests, ExistsTests, FindTests, + JsonTests, UpdateTests, PatchTests, RemoveFieldsTests, diff --git a/src/Tests.CSharp/Types.cs b/src/Tests.CSharp/Types.cs index 3acb0d0..e388084 100644 --- a/src/Tests.CSharp/Types.cs +++ b/src/Tests.CSharp/Types.cs @@ -18,7 +18,7 @@ public class JsonDocument public string Value { get; set; } = ""; public int NumValue { get; set; } = 0; public SubDocument? 
Sub { get; set; } = null; - + /// /// A set of documents used for integration tests /// @@ -30,6 +30,22 @@ public class JsonDocument new() { Id = "four", Value = "purple", NumValue = 17, Sub = new() { Foo = "green", Bar = "red" } }, new() { Id = "five", Value = "purple", NumValue = 18 } ]; + + /// The JSON for document ID `one` + public static string One = """{"Id":"one","Value":"FIRST!","NumValue":0,"Sub":null}"""; + + /// The JSON for document ID `two` + public static string Two = """{"Id":"two","Value":"another","NumValue":10,"Sub":{"Foo":"green","Bar":"blue"}}"""; + + /// The JSON for document ID `three` + public static string Three = """{"Id":"three","Value":"","NumValue":4,"Sub":null}"""; + + /// The JSON for document ID `four` + public static string Four = """{"Id":"four","Value":"purple","NumValue":17,"Sub":{"Foo":"green","Bar":"red"}}"""; + + /// The JSON for document ID `five` + public static string Five = """{"Id":"five","Value":"purple","NumValue":18,"Sub":null}"""; + } public class ArrayDocument diff --git a/src/Tests/CommonTests.fs b/src/Tests/CommonTests.fs index 98f6fe8..40aeb8e 100644 --- a/src/Tests/CommonTests.fs +++ b/src/Tests/CommonTests.fs @@ -1,5 +1,7 @@ module CommonTests +open System.IO +open System.IO.Pipelines open BitBadger.Documents open Expecto @@ -484,6 +486,81 @@ let queryTests = testList "Query" [ ] ] +let private streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + +/// Unit tests for the PipeWriter module +let pipeWriterTests = testList "Extensions.PipeWriter" [ + testList "writeString" [ + testTask "succeeds when writer is open" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + let! 
result = PipeWriter.writeString writer "abc" + Expect.isTrue result "The write operation should have been successful" + Expect.equal (streamText stream) "abc" "The string was not written correctly" + finally + writer.Complete() + } + testTask "succeeds when writer is completed" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + do! writer.CompleteAsync() + + let! result = PipeWriter.writeString writer "abc" + Expect.isFalse result "The write operation should have returned false" + Expect.equal (streamText stream) "" "No text should have been written" + } + ] + testList "writeStrings" [ + testTask "succeeds with no strings" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + do! PipeWriter.writeStrings writer [] + Expect.equal (streamText stream) "[]" "An empty sequence of strings was not written correctly" + finally + writer.Complete() + } + testTask "succeeds with one strings" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + do! PipeWriter.writeStrings writer [ "le-test" ] + Expect.equal (streamText stream) "[le-test]" "A sequence of one string was not written correctly" + finally + writer.Complete() + } + testTask "succeeds with many strings" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + do! PipeWriter.writeStrings writer [ "z"; "y"; "x"; "c"; "b"; "a" ] + Expect.equal (streamText stream) "[z,y,x,c,b,a]" "A sequence of many strings was not written correctly" + finally + writer.Complete() + } + testTask "succeeds when the writer is completed early" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + let items = seq { + "a" + "b" + "c" + writer.Complete() + "d" + "e" + "f" + } + + do! 
PipeWriter.writeStrings writer items + Expect.equal (streamText stream) "[a,b,c" "The writing should have stopped when the writer completed" + } + ] +] + /// Tests which do not hit the database let all = testList "Common" [ comparisonTests @@ -492,5 +569,6 @@ let all = testList "Common" [ parameterNameTests autoIdTests queryTests + pipeWriterTests testSequenced configurationTests ] diff --git a/src/Tests/PostgresExtensionTests.fs b/src/Tests/PostgresExtensionTests.fs index 60fcf31..d780072 100644 --- a/src/Tests/PostgresExtensionTests.fs +++ b/src/Tests/PostgresExtensionTests.fs @@ -1,5 +1,7 @@ module PostgresExtensionTests +open System.IO +open System.IO.Pipelines open BitBadger.Documents open BitBadger.Documents.Postgres open BitBadger.Documents.Tests @@ -13,6 +15,73 @@ let private mkConn (db: ThrowawayPostgresDb) = conn.Open() conn +/// Set up a stream writer for a test +let private writeStream (stream: Stream) = + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + +/// Get the text of the given stream +let private streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + +/// Verify a JSON array begins with "[" and ends with "]" +let private verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Verify the presence of a document by its ID +let private verifyDocById json docId = + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of a document by its ID +let private verifySingleById json docId = + verifyBeginEnd json + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of any of the given document IDs in the given JSON +let private verifyAnyById (json: string) (docIds: string list) = + match docIds |> List.tryFind (fun it -> json.Contains 
$"{{\"Id\": \"{it}\"") with + | Some _ -> () + | None -> + let ids = docIds |> String.concat ", " + Expect.isTrue false $"Could not find any of IDs {ids} in {json}" + +/// Verify the JSON for `all` returning data +let private verifyAllData json = + verifyBeginEnd json + [ "one"; "two"; "three"; "four"; "five" ] |> List.iter (verifyDocById json) + +/// Verify an empty JSON array +let private verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + +/// Verify an empty JSON document +let private verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + +/// Verify the JSON for an ordered query +let private verifyExpectedOrder (json: string) idFirst idSecond idThird idFourth idFifth = + let firstIdx = json.IndexOf $"{{\"Id\": \"%s{idFirst}\"," + let secondIdx = json.IndexOf $"{{\"Id\": \"%s{idSecond}\"," + verifyBeginEnd json + Expect.isGreaterThan secondIdx firstIdx $"`{idSecond}` should have been after `{idFirst}`" + match idThird with + | Some id3 -> + let thirdIdx = json.IndexOf $"{{\"Id\": \"%s{id3}\"," + Expect.isGreaterThan thirdIdx secondIdx $"`{id3}` should have been after `{idSecond}`" + match idFourth with + | Some id4 -> + let fourthIdx = json.IndexOf $"{{\"Id\": \"%s{id4}\"," + Expect.isGreaterThan fourthIdx thirdIdx $"`{id4}` should have been after `{id3}`" + match idFifth with + | Some id5 -> + let fifthIdx = json.IndexOf $"{{\"Id\": \"%s{id5}\"," + Expect.isGreaterThan fifthIdx fourthIdx $"`{id5}` should have been after `{id4}`" + | None -> () + | None -> () + | None -> () + /// Integration tests for the F# extensions on the NpgsqlConnection data type let integrationTests = let loadDocs (conn: NpgsqlConnection) = backgroundTask { @@ -41,6 +110,65 @@ let integrationTests = Expect.isEmpty docs "There should have been no documents returned" } ] + testList "customJsonArray" [ + testTask "succeeds when data is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! 
docs = conn.customJsonArray (Query.find PostgresDb.TableName) [] jsonFromData + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength ((string docs).Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `[`" + } + testTask "succeeds when data is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! docs = + conn.customJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + jsonFromData + Expect.equal docs "[]" "There should have been no documents returned" + } + ] + testList "writeCustomJsonArray" [ + testTask "succeeds when data is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeCustomJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData + let docs = streamText stream + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `[`" + finally + writer.Complete() + } + testTask "succeeds when data is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeCustomJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? 
(@ > 100)" ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() + } + ] testList "customSingle" [ testTask "succeeds when a row is found" { use db = PostgresDb.BuildDb() @@ -68,6 +196,34 @@ let integrationTests = Expect.isNone doc "There should not have been a document returned" } ] + testList "customJsonSingle" [ + testTask "succeeds when a row is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! doc = + conn.customJsonSingle + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" + [ "@id", Sql.string "one"] + jsonFromData + Expect.stringStarts doc "{" "The document should have started with an open brace" + Expect.stringContains doc "\"Id\": \"one\"" "An incorrect document was returned" + Expect.stringEnds doc "}" "The document should have ended with a closing brace" + } + testTask "succeeds when a row is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! doc = + conn.customJsonSingle + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" + [ "@id", Sql.string "eighty" ] + jsonFromData + Expect.equal doc "{}" "There should not have been a document returned" + } + ] testList "customNonQuery" [ testTask "succeeds when operating on data" { use db = PostgresDb.BuildDb() @@ -106,7 +262,7 @@ let integrationTests = let keyExists () = conn.customScalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_key') AS it" [] toExists - + let! exists = tableExists () let! alsoExists = keyExists () Expect.isFalse exists "The table should not exist already" @@ -124,7 +280,7 @@ let integrationTests = let indexExists () = conn.customScalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_document') AS it" [] toExists - + let! 
exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -139,7 +295,7 @@ let integrationTests = let indexExists () = conn.customScalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_test') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -213,7 +369,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! theCount = conn.countByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] Expect.equal theCount 2 "There should have been 2 matching documents" } @@ -332,7 +488,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! results = conn.findAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -344,7 +500,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! results = conn.findAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -356,7 +512,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! results = conn.findAllOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -677,6 +833,886 @@ let integrationTests = Expect.equal "four" doc.Value.Id "An incorrect document was returned" } ] + testList "jsonAll" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonAll PostgresDb.TableName + verifyAllData json + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + let! 
json = conn.jsonAll PostgresDb.TableName + verifyEmpty json + } + ] + testList "jsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") (Some "four") (Some "five") + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "five" "four" (Some "two") (Some "three") (Some "one") + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonAllOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") (Some "four") (Some "five") + } + ] + testList "jsonById" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! json = conn.jsonById PostgresDb.TableName "two" + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonById PostgresDb.TableName "three hundred eighty-seven" + verifyNoDoc json + } + ] + testList "jsonByFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! 
json = + conn.jsonByFields + PostgresDb.TableName All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById json "four" + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonByFields PostgresDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByFields + PostgresDb.TableName All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty json + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! conn.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc + + let! json = + conn.jsonByFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! conn.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc + let! json = + conn.jsonByFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty json + } + ] + testList "jsoByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! 
json = + conn.jsonByFieldsOrdered + PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder json "five" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByFieldsOrdered + PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder json "four" "five" None None None + } + ] + testList "jsonByContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! json = conn.jsonByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonByContains PostgresDb.TableName {| Value = "mauve" |} + verifyEmpty json + } + ] + testList "jsonByContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder json "two" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder json "four" "two" None None None + } + ] + testList "jsonByJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! json = conn.jsonByJsonPath PostgresDb.TableName "$.NumValue ? 
(@ < 15)" + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonByJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" + verifyEmpty json + } + ] + testList "jsonByJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByJsonPathOrdered + PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") None None + } + ] + testList "jsonFirstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByFields PostgresDb.TableName Any [ Field.Equal "Value" "another" ] + verifyDocById json "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] + verifyAnyById json [ "five"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! 
json = conn.jsonFirstByFields PostgresDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "jsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById json "five" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById json "four" + } + ] + testList "jsonFirstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByContains PostgresDb.TableName {| Value = "another" |} + verifyDocById json "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyAnyById json [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByContains PostgresDb.TableName {| Value = "absent" |} + verifyNoDoc json + } + ] + testList "jsonFirstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById json "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! 
json = + conn.jsonFirstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById json "four" + } + ] + testList "jsonFirstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByJsonPath PostgresDb.TableName """$.Value ? (@ == "FIRST!")""" + verifyDocById json "one" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByJsonPath PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" + verifyAnyById json [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByJsonPath PostgresDb.TableName """$.Id ? (@ == "nope")""" + verifyNoDoc json + } + ] + testList "jsonFirstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById json "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById json "four" + } + ] + testList "writeJsonAll" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonAll PostgresDb.TableName writer + verifyAllData (streamText stream) + finally + writer.Complete() + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAll PostgresDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + finally + writer.Complete() + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + finally + writer.Complete() + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + finally + writer.Complete() + } + ] + testList "writeJsonById" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonById PostgresDb.TableName writer "two" + let json = streamText stream + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonById PostgresDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonByFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName + writer + All + [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById (streamText stream) "four" + finally + writer.Complete() + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById (streamText stream) "three" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonByFields + PostgresDb.TableName + writer + All + [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! conn.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + finally + writer.Complete() + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! conn.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder (streamText stream) "five" "four" None None None + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "four" "five" None None None + finally + writer.Complete() + } + ] + testList "writeJsonByContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + let json = streamText stream + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByContains PostgresDb.TableName writer {| Value = "mauve" |} + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonByContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder (streamText stream) "two" "four" None None None + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder (streamText stream) "four" "two" None None None + finally + writer.Complete() + } + ] + testList "writeJsonByJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" + let json = streamText stream + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonByJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? 
(@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + finally + writer.Complete() + } + ] + testList "writeJsonFirstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] + verifyAnyById (streamText stream) [ "five"; "four" ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById (streamText stream) "five" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! 
loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() + } + ] + testList "writeJsonFirstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "another" |} + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "absent" |} + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonFirstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() + } + ] + testList "writeJsonFirstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" + verifyDocById (streamText stream) "one" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonFirstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! 
loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() + } + ] testList "updateById" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() @@ -694,7 +1730,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateById PostgresDb.TableName "test" { emptyDoc with Id = "x"; Sub = Some { Foo = "blue"; Bar = "red" } } @@ -707,7 +1743,7 @@ let integrationTests = do! loadDocs conn do! conn.updateByFunc - PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = conn.findById PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" Expect.equal @@ -720,10 +1756,10 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! 
conn.updateByFunc - PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] testList "patchById" [ @@ -731,7 +1767,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchById PostgresDb.TableName "one" {| NumValue = 44 |} let! after = conn.findById PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" @@ -742,7 +1778,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchById PostgresDb.TableName "test" {| Foo = "green" |} } @@ -752,7 +1788,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! after = conn.countByFields PostgresDb.TableName Any [ Field.Equal "NumValue" "77" ] Expect.equal after 2 "There should have been 2 documents returned" @@ -762,7 +1798,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByFields PostgresDb.TableName Any [ Field.Equal "Value" "burgundy" ] {| Foo = "green" |} } @@ -772,7 +1808,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchByContains PostgresDb.TableName {| Value = "purple" |} {| NumValue = 77 |} let! after = conn.countByContains PostgresDb.TableName {| NumValue = 77 |} Expect.equal after 2 "There should have been 2 documents returned" @@ -782,7 +1818,7 @@ let integrationTests = use conn = mkConn db let! 
before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByContains PostgresDb.TableName {| Value = "burgundy" |} {| Foo = "green" |} } @@ -792,7 +1828,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchByJsonPath PostgresDb.TableName "$.NumValue ? (@ > 10)" {| NumValue = 1000 |} let! after = conn.countByJsonPath PostgresDb.TableName "$.NumValue ? (@ > 999)" Expect.equal after 2 "There should have been 2 documents returned" @@ -802,7 +1838,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" {| Foo = "green" |} } @@ -834,14 +1870,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsById PostgresDb.TableName "two" [ "AFieldThatIsNotThere" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsById PostgresDb.TableName "two" [ "Value" ] } @@ -874,14 +1910,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByFields PostgresDb.TableName Any [ Field.Equal "NumValue" "17" ] [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! 
conn.removeFieldsByFields PostgresDb.TableName Any [ Field.NotEqual "Abracadabra" "apple" ] [ "Value" ] @@ -914,14 +1950,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByContains PostgresDb.TableName {| NumValue = 17 |} [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsByContains PostgresDb.TableName {| Abracadabra = "apple" |} [ "Value" ] } @@ -953,14 +1989,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByJsonPath PostgresDb.TableName "$.NumValue ? (@ == 17)" [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsByJsonPath PostgresDb.TableName "$.Abracadabra ? (@ == \"apple\")" [ "Value" ] } diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index 6043d9c..8fd4e73 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -1,5 +1,7 @@ module PostgresTests +open System.IO +open System.IO.Pipelines open Expecto open BitBadger.Documents open BitBadger.Documents.Postgres @@ -68,7 +70,7 @@ let parametersTests = testList "Parameters" [ Expect.equal (idParam "99") ("@id", Sql.string "99") "String ID parameter not constructed correctly" } test "succeeds for non-numeric non-string ID" { - let target = { new obj() with override _.ToString() = "ToString was called" } + let target = { new obj() with override _.ToString() = "ToString was called" } Expect.equal (idParam target) ("@id", Sql.string "ToString was called") @@ -275,6 +277,17 @@ let loadDocs () = backgroundTask { for doc in testDocuments do do! 
insert PostgresDb.TableName doc } +/// Set up a stream writer for a test +let writeStream (stream: Stream) = + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + +/// Get the text of the given stream +let streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + + /// Integration tests for the Configuration module of the PostgreSQL library let configurationTests = testList "Configuration" [ test "useDataSource disposes existing source" { @@ -317,6 +330,62 @@ let customTests = testList "Custom" [ Expect.isEmpty docs "There should have been no documents returned" } ] + testList "jsonArray" [ + testTask "succeeds when data is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let! docs = Custom.jsonArray (Query.find PostgresDb.TableName) [] jsonFromData + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength ((string docs).Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" + } + testTask "succeeds when data is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let! docs = + Custom.jsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + jsonFromData + Expect.equal docs "[]" "There should have been no documents returned" + } + ] + testList "writeJsonArray" [ + testTask "succeeds when data is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + + try + do! 
Custom.writeJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData + let docs = streamText stream + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" + finally + writer.Complete() + } + testTask "succeeds when data is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Custom.writeJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() + } + ] testList "single" [ testTask "succeeds when a row is found" { use db = PostgresDb.BuildDb() @@ -342,6 +411,32 @@ let customTests = testList "Custom" [ Expect.isNone doc "There should not have been a document returned" } ] + testList "jsonSingle" [ + testTask "succeeds when a row is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let! doc = + Custom.jsonSingle + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" + [ "@id", Sql.string "one"] + jsonFromData + Expect.stringStarts doc "{" "The document should have started with an open brace" + Expect.stringContains doc "\"Id\": \"one\"" "An incorrect document was returned" + Expect.stringEnds doc "}" "The document should have ended with a closing brace" + } + testTask "succeeds when a row is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let! 
doc = + Custom.jsonSingle + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" + [ "@id", Sql.string "eighty" ] + jsonFromData + Expect.equal doc "{}" "There should not have been a document returned" + } + ] testList "nonQuery" [ testTask "succeeds when operating on data" { use db = PostgresDb.BuildDb() @@ -380,7 +475,7 @@ let definitionTests = testList "Definition" [ let keyExists () = Custom.scalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_key') AS it" [] toExists - + let! exists = tableExists () let! alsoExists = keyExists () Expect.isFalse exists "The table should not exist already" @@ -397,7 +492,7 @@ let definitionTests = testList "Definition" [ let indexExists () = Custom.scalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_document') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -410,7 +505,7 @@ let definitionTests = testList "Definition" [ use db = PostgresDb.BuildDb() let indexExists () = Custom.scalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_test') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -451,12 +546,12 @@ let documentTests = testList "Document" [ use db = PostgresDb.BuildDb() let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should be no documents in the table" - + do! insert PostgresDb.TableName { Key = 0; Text = "one" } do! insert PostgresDb.TableName { Key = 0; Text = "two" } do! insert PostgresDb.TableName { Key = 77; Text = "three" } do! insert PostgresDb.TableName { Key = 0; Text = "four" } - + let! 
after = Find.allOrdered PostgresDb.TableName [ Field.Named "n:Key" ] Expect.hasLength after 4 "There should have been 4 documents returned" Expect.equal (after |> List.map _.Key) [ 1; 2; 77; 78 ] "The IDs were not generated correctly" @@ -470,12 +565,12 @@ let documentTests = testList "Document" [ use db = PostgresDb.BuildDb() let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should be no documents in the table" - + do! insert PostgresDb.TableName { emptyDoc with Value = "one" } do! insert PostgresDb.TableName { emptyDoc with Value = "two" } do! insert PostgresDb.TableName { emptyDoc with Id = "abc123"; Value = "three" } do! insert PostgresDb.TableName { emptyDoc with Value = "four" } - + let! after = Find.all PostgresDb.TableName Expect.hasLength after 4 "There should have been 4 documents returned" Expect.hasCountOf after 3u (fun doc -> doc.Id.Length = 32) "Three of the IDs should have been GUIDs" @@ -490,12 +585,12 @@ let documentTests = testList "Document" [ use db = PostgresDb.BuildDb() let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should be no documents in the table" - + do! insert PostgresDb.TableName { emptyDoc with Value = "one" } do! insert PostgresDb.TableName { emptyDoc with Value = "two" } do! insert PostgresDb.TableName { emptyDoc with Id = "abc123"; Value = "three" } do! insert PostgresDb.TableName { emptyDoc with Value = "four" } - + let! after = Find.all PostgresDb.TableName Expect.hasLength after 4 "There should have been 4 documents returned" Expect.hasCountOf @@ -549,7 +644,7 @@ let countTests = testList "Count" [ testTask "succeeds when items are found" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! 
theCount = Count.byFields PostgresDb.TableName Any [ Field.Between "NumValue" 15 20; Field.Equal "NumValue" 0 ] Expect.equal theCount 3 "There should have been 3 matching documents" @@ -557,7 +652,7 @@ let countTests = testList "Count" [ testTask "succeeds when items are not found" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! theCount = Count.byFields PostgresDb.TableName All [ Field.Exists "Sub"; Field.Greater "NumValue" 100 ] Expect.equal theCount 0 "There should have been no matching documents" } @@ -672,7 +767,7 @@ let findTests = testList "Find" [ testTask "succeeds when ordering numerically" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! results = Find.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -683,7 +778,7 @@ let findTests = testList "Find" [ testTask "succeeds when ordering numerically descending" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! results = Find.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -694,7 +789,7 @@ let findTests = testList "Find" [ testTask "succeeds when ordering alphabetically" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! results = Find.allOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -750,7 +845,7 @@ let findTests = testList "Find" [ use db = PostgresDb.BuildDb() do! Definition.ensureTable PostgresDb.TableName for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc - + let! docs = Find.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] @@ -760,7 +855,7 @@ let findTests = testList "Find" [ use db = PostgresDb.BuildDb() do! Definition.ensureTable PostgresDb.TableName for doc in ArrayDocument.TestDocuments do do! 
insert PostgresDb.TableName doc - + let! docs = Find.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] @@ -1016,6 +1111,856 @@ let findTests = testList "Find" [ ] ] +/// Verify a JSON array begins with "[" and ends with "]" +let private verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Verify the presence of a document by its ID +let private verifyDocById json docId = + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of a document by its ID +let private verifySingleById json docId = + verifyBeginEnd json + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of any of the given document IDs in the given JSON +let private verifyAnyById (json: string) (docIds: string list) = + match docIds |> List.tryFind (fun it -> json.Contains $"{{\"Id\": \"{it}\"") with + | Some _ -> () + | None -> + let ids = docIds |> String.concat ", " + Expect.isTrue false $"Could not find any of IDs {ids} in {json}" + +/// Verify the JSON for `all` returning data +let private verifyAllData json = + verifyBeginEnd json + [ "one"; "two"; "three"; "four"; "five" ] |> List.iter (verifyDocById json) + +/// Verify an empty JSON array +let private verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + +/// Verify an empty JSON document +let private verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + +/// Verify the JSON for an ordered query +let private verifyExpectedOrder (json: string) idFirst idSecond idThird idFourth idFifth = + let firstIdx = json.IndexOf $"{{\"Id\": \"%s{idFirst}\"," + let secondIdx = json.IndexOf $"{{\"Id\": \"%s{idSecond}\"," + verifyBeginEnd json + Expect.isGreaterThan secondIdx firstIdx $"`{idSecond}` should have been after 
`{idFirst}`" + match idThird with + | Some id3 -> + let thirdIdx = json.IndexOf $"{{\"Id\": \"%s{id3}\"," + Expect.isGreaterThan thirdIdx secondIdx $"`{id3}` should have been after `{idSecond}`" + match idFourth with + | Some id4 -> + let fourthIdx = json.IndexOf $"{{\"Id\": \"%s{id4}\"," + Expect.isGreaterThan fourthIdx thirdIdx $"`{id4}` should have been after `{id3}`" + match idFifth with + | Some id5 -> + let fifthIdx = json.IndexOf $"{{\"Id\": \"%s{id5}\"," + Expect.isGreaterThan fifthIdx fourthIdx $"`{id5}` should have been after `{id4}`" + | None -> () + | None -> () + | None -> () + +/// Integration tests for the Json module of the PostgreSQL library +let jsonTests = testList "Json" [ + testList "all" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.all PostgresDb.TableName + verifyAllData json + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + let! json = Json.all PostgresDb.TableName + verifyEmpty json + } + ] + testList "allOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") (Some "four") (Some "five") + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "five" "four" (Some "two") (Some "three") (Some "one") + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.allOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") (Some "four") (Some "five") + } + ] + testList "byId" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! 
loadDocs () + + let! json = Json.byId PostgresDb.TableName "two" + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.byId PostgresDb.TableName "three hundred eighty-seven" + verifyNoDoc json + } + ] + testList "byFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byFields PostgresDb.TableName All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById json "four" + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.byFields PostgresDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byFields PostgresDb.TableName All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty json + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc + + let! json = Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc + let! 
json = Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty json + } + ] + testList "byFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder json "five" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder json "four" "five" None None None + } + ] + testList "byContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let! json = Json.byContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.byContains PostgresDb.TableName {| Value = "mauve" |} + verifyEmpty json + } + ] + testList "byContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder json "two" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder json "four" "two" None None None + } + ] + testList "byJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let! 
json = Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 15)" + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" + verifyEmpty json + } + ] + testList "byJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") None None + } + ] + testList "firstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "another" ] + verifyDocById json "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] + verifyAnyById json [ "five"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "firstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! 
json = + Json.firstByFieldsOrdered PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById json "five" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.firstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById json "four" + } + ] + testList "firstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByContains PostgresDb.TableName {| Value = "another" |} + verifyDocById json "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyAnyById json [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByContains PostgresDb.TableName {| Value = "absent" |} + verifyNoDoc json + } + ] + testList "firstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.firstByContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById json "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.firstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById json "four" + } + ] + testList "firstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByJsonPath PostgresDb.TableName """$.Value ? (@ == "FIRST!")""" + verifyDocById json "one" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! 
loadDocs () + let! json = Json.firstByJsonPath PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" + verifyAnyById json [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = Json.firstByJsonPath PostgresDb.TableName """$.Id ? (@ == "nope")""" + verifyNoDoc json + } + ] + testList "firstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.firstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById json "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + let! json = + Json.firstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById json "four" + } + ] + testList "writeAll" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAll PostgresDb.TableName writer + verifyAllData (streamText stream) + finally + writer.Complete() + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAll PostgresDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeAllOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + finally + writer.Complete() + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + finally + writer.Complete() + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + finally + writer.Complete() + } + ] + testList "writeById" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeById PostgresDb.TableName writer "two" + let json = streamText stream + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeById PostgresDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeByFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! 
loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById (streamText stream) "four" + finally + writer.Complete() + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById (streamText stream) "three" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + finally + writer.Complete() + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder (streamText stream) "five" "four" None None None + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "four" "five" None None None + finally + writer.Complete() + } + ] + testList "writeByContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + let json = streamText stream + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByContains PostgresDb.TableName writer {| Value = "mauve" |} + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeByContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! 
loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder (streamText stream) "two" "four" None None None + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder (streamText stream) "four" "two" None None None + finally + writer.Complete() + } + ] + testList "writeByJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" + let json = streamText stream + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeByJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? 
(@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + finally + writer.Complete() + } + ] + testList "writeFirstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] + verifyAnyById (streamText stream) [ "five"; "four" ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById (streamText stream) "five" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() + } + ] + testList "writeFirstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "another" |} + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "absent" |} + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeFirstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() + } + ] + testList "writeFirstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" + verifyDocById (streamText stream) "one" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeFirstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? 
(@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() + } + ] +] + /// Integration tests for the Update module of the PostgreSQL library let updateTests = testList "Update" [ testList "byId" [ @@ -1034,7 +1979,7 @@ let updateTests = testList "Update" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Update.byId PostgresDb.TableName "test" { emptyDoc with Id = "x"; Sub = Some { Foo = "blue"; Bar = "red" } } @@ -1045,7 +1990,7 @@ let updateTests = testList "Update" [ use db = PostgresDb.BuildDb() do! loadDocs () - do! Update.byFunc PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = Find.byId PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" Expect.equal @@ -1058,9 +2003,9 @@ let updateTests = testList "Update" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test - do! Update.byFunc PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! 
Update.byFunc PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] ] @@ -1071,7 +2016,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byId PostgresDb.TableName "one" {| NumValue = 44 |} let! after = Find.byId PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" @@ -1082,7 +2027,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byId PostgresDb.TableName "test" {| Foo = "green" |} } @@ -1091,7 +2036,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! after = Count.byFields PostgresDb.TableName Any [ Field.Equal "NumValue" 77 ] Expect.equal after 2 "There should have been 2 documents returned" @@ -1101,7 +2046,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byFields PostgresDb.TableName Any [ Field.Equal "Value" "burgundy" ] {| Foo = "green" |} } @@ -1110,7 +2055,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byContains PostgresDb.TableName {| Value = "purple" |} {| NumValue = 77 |} let! after = Count.byContains PostgresDb.TableName {| NumValue = 77 |} Expect.equal after 2 "There should have been 2 documents returned" @@ -1120,7 +2065,7 @@ let patchTests = testList "Patch" [ let! 
before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byContains PostgresDb.TableName {| Value = "burgundy" |} {| Foo = "green" |} } @@ -1129,7 +2074,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byJsonPath PostgresDb.TableName "$.NumValue ? (@ > 10)" {| NumValue = 1000 |} let! after = Count.byJsonPath PostgresDb.TableName "$.NumValue ? (@ > 999)" Expect.equal after 2 "There should have been 2 documents returned" @@ -1139,7 +2084,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" {| Foo = "green" |} } @@ -1172,13 +2117,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byId PostgresDb.TableName "two" [ "AFieldThatIsNotThere" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! RemoveFields.byId PostgresDb.TableName "two" [ "Value" ] } @@ -1207,13 +2152,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byFields PostgresDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! 
RemoveFields.byFields PostgresDb.TableName Any [ Field.NotEqual "Abracadabra" "apple" ] [ "Value" ] } @@ -1242,13 +2187,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byContains PostgresDb.TableName {| NumValue = 17 |} [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! RemoveFields.byContains PostgresDb.TableName {| Abracadabra = "apple" |} [ "Value" ] } @@ -1277,13 +2222,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byJsonPath PostgresDb.TableName "$.NumValue ? (@ == 17)" [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! RemoveFields.byJsonPath PostgresDb.TableName "$.Abracadabra ? (@ == \"apple\")" [ "Value" ] } @@ -1377,6 +2322,7 @@ let all = testList "Postgres" [ countTests existsTests findTests + jsonTests updateTests patchTests removeFieldsTests diff --git a/src/Tests/SqliteExtensionTests.fs b/src/Tests/SqliteExtensionTests.fs index 8f0380e..87f74d2 100644 --- a/src/Tests/SqliteExtensionTests.fs +++ b/src/Tests/SqliteExtensionTests.fs @@ -1,5 +1,7 @@ module SqliteExtensionTests +open System.IO +open System.IO.Pipelines open System.Text.Json open BitBadger.Documents open BitBadger.Documents.Sqlite @@ -10,9 +12,41 @@ open Types /// Integration tests for the F# extensions on the SqliteConnection data type let integrationTests = - let loadDocs () = backgroundTask { - for doc in testDocuments do do! insert SqliteDb.TableName doc + let loadDocs (conn: SqliteConnection) = backgroundTask { + for doc in testDocuments do do! 
conn.insert SqliteDb.TableName doc } + + /// Set up a stream writer for a test + let writeStream (stream: Stream) = + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + + /// Get the text of the given stream + let streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + + /// Verify a JSON array begins with "[" and ends with "]" + let verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + + /// Verify an empty JSON array + let verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + + /// Verify an empty JSON document + let verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + + /// Verify the presence of any of the given documents in the given JSON + let verifyAny (json: string) (docs: string list) = + match docs |> List.tryFind json.Contains with + | Some _ -> () + | None -> + let theDocs = docs |> String.concat " | " + Expect.isTrue false $"Could not find any of |{theDocs}| in {json}" + testList "Sqlite.Extensions" [ testTask "ensureTable succeeds" { use! db = SqliteDb.BuildDb() @@ -22,12 +56,12 @@ let integrationTests = $"SELECT EXISTS (SELECT 1 FROM {SqliteDb.Catalog} WHERE name = @name) AS it" [ SqliteParameter("@name", name) ] toExists - + let! exists = itExists "ensured" let! alsoExists = itExists "idx_ensured_key" Expect.isFalse exists "The table should not exist already" Expect.isFalse alsoExists "The key index should not exist already" - + do! conn.ensureTable "ensured" let! exists' = itExists "ensured" let! alsoExists' = itExists "idx_ensured_key" @@ -42,10 +76,10 @@ let integrationTests = $"SELECT EXISTS (SELECT 1 FROM {SqliteDb.Catalog} WHERE name = 'idx_ensured_test') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" - + do! 
conn.ensureTable "ensured" do! conn.ensureFieldIndex "ensured" "test" [ "Name"; "Age" ] let! exists' = indexExists () @@ -57,7 +91,7 @@ let integrationTests = use conn = Configuration.dbConn () let! before = conn.findAll SqliteDb.TableName Expect.equal before [] "There should be no documents in the table" - + let testDoc = { emptyDoc with Id = "turkey"; Sub = Some { Foo = "gobble"; Bar = "gobble" } } do! conn.insert SqliteDb.TableName testDoc let! after = conn.findAll SqliteDb.TableName @@ -81,7 +115,7 @@ let integrationTests = use conn = Configuration.dbConn () let! before = conn.findAll SqliteDb.TableName Expect.equal before [] "There should be no documents in the table" - + let testDoc = { emptyDoc with Id = "test"; Sub = Some { Foo = "a"; Bar = "b" } } do! conn.save SqliteDb.TableName testDoc let! after = conn.findAll SqliteDb.TableName @@ -92,11 +126,11 @@ let integrationTests = use conn = Configuration.dbConn () let testDoc = { emptyDoc with Id = "test"; Sub = Some { Foo = "a"; Bar = "b" } } do! conn.insert SqliteDb.TableName testDoc - + let! before = conn.findById SqliteDb.TableName "test" if Option.isNone before then Expect.isTrue false "There should have been a document returned" Expect.equal before.Value testDoc "The document is not correct" - + let upd8Doc = { testDoc with Sub = Some { Foo = "c"; Bar = "d" } } do! conn.save SqliteDb.TableName upd8Doc let! after = conn.findById SqliteDb.TableName "test" @@ -108,16 +142,16 @@ let integrationTests = testTask "countAll succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! theCount = conn.countAll SqliteDb.TableName Expect.equal theCount 5L "There should have been 5 matching documents" } testTask "countByFields succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! 
theCount = conn.countByFields SqliteDb.TableName Any [ Field.Equal "Value" "purple" ] Expect.equal theCount 2L "There should have been 2 matching documents" } @@ -125,16 +159,16 @@ let integrationTests = testTask "succeeds when a document exists" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! exists = conn.existsById SqliteDb.TableName "three" Expect.isTrue exists "There should have been an existing document" } testTask "succeeds when a document does not exist" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! exists = conn.existsById SqliteDb.TableName "seven" Expect.isFalse exists "There should not have been an existing document" } @@ -143,16 +177,16 @@ let integrationTests = testTask "succeeds when documents exist" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! exists = conn.existsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 10 ] Expect.isTrue exists "There should have been existing documents" } testTask "succeeds when no matching documents exist" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! exists = conn.existsByFields SqliteDb.TableName Any [ Field.Equal "Nothing" "none" ] Expect.isFalse exists "There should not have been any existing documents" } @@ -161,11 +195,11 @@ let integrationTests = testTask "succeeds when there is data" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + do! insert SqliteDb.TableName { Foo = "one"; Bar = "two" } do! insert SqliteDb.TableName { Foo = "three"; Bar = "four" } do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" } - + let! results = conn.findAll SqliteDb.TableName let expected = [ { Foo = "one"; Bar = "two" } @@ -185,8 +219,8 @@ let integrationTests = testTask "succeeds when ordering numerically" { use! 
db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -197,8 +231,8 @@ let integrationTests = testTask "succeeds when ordering numerically descending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -209,8 +243,8 @@ let integrationTests = testTask "succeeds when ordering alphabetically" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -223,8 +257,8 @@ let integrationTests = testTask "succeeds when a document is found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! doc = conn.findById SqliteDb.TableName "two" Expect.isSome doc "There should have been a document returned" Expect.equal doc.Value.Id "two" "The incorrect document was returned" @@ -232,8 +266,8 @@ let integrationTests = testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! doc = conn.findById SqliteDb.TableName "three hundred eighty-seven" Expect.isNone doc "There should not have been a document returned" } @@ -242,16 +276,16 @@ let integrationTests = testTask "succeeds when documents are found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! 
docs = conn.findByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] Expect.hasLength docs 2 "There should have been two documents returned" } testTask "succeeds when documents are not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! docs = conn.findByFields SqliteDb.TableName Any [ Field.Equal "Value" "mauve" ] Expect.isEmpty docs "There should have been no documents returned" } @@ -260,7 +294,7 @@ let integrationTests = testTask "succeeds when sorting ascending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! docs = conn.findByFieldsOrdered @@ -271,7 +305,7 @@ let integrationTests = testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! docs = conn.findByFieldsOrdered @@ -284,8 +318,8 @@ let integrationTests = testTask "succeeds when a document is found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] Expect.isSome doc "There should have been a document returned" Expect.equal doc.Value.Id "two" "The incorrect document was returned" @@ -293,8 +327,8 @@ let integrationTests = testTask "succeeds when multiple documents are found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] Expect.isSome doc "There should have been a document returned" Expect.contains [ "two"; "four" ] doc.Value.Id "An incorrect document was returned" @@ -302,8 +336,8 @@ let integrationTests = testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! 
doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] Expect.isNone doc "There should not have been a document returned" } @@ -312,7 +346,7 @@ let integrationTests = testTask "succeeds when sorting ascending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findFirstByFieldsOrdered @@ -323,7 +357,7 @@ let integrationTests = testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findFirstByFieldsOrdered @@ -332,12 +366,573 @@ let integrationTests = Expect.equal "four" doc.Value.Id "An incorrect document was returned" } ] + testList "jsonAll" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + + do! conn.insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! conn.insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! conn.insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + let! json = conn.jsonAll SqliteDb.TableName + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + let! json = conn.jsonAll SqliteDb.TableName + verifyEmpty json + } + ] + testList "jsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! 
json = conn.jsonAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] + Expect.equal + json + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] + Expect.equal + json + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonAllOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] + Expect.equal + json + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + ] + testList "jsonById" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonById SqliteDb.TableName "two" + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonById SqliteDb.TableName "three hundred eighty-seven" + verifyNoDoc json + } + ] + testList "jsonByFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! 
json = conn.jsonByFields SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonByFields SqliteDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal json $"[{JsonDocument.three}]" "There should have been one document returned" + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonByFields SqliteDb.TableName Any [ Field.Greater "NumValue" 100 ] + verifyEmpty json + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + let! json = + conn.jsonByFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + let! 
json = + conn.jsonByFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty json + } + ] + testList "jsonByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal json $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered + SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal json $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered + SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + json + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered + SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + json + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + } + ] + testList "jsonFirstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! 
json = conn.jsonFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when multiple documents are found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonFirstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "jsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonFirstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal json JsonDocument.two "An incorrect document was returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonFirstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal json JsonDocument.four "An incorrect document was returned" + } + ] + testList "writeJsonAll" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + + do! conn.insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! conn.insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! conn.insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonAll SqliteDb.TableName writer + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + finally + writer.Complete() + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAll SqliteDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + finally + writer.Complete() + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! 
loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() + } + ] + testList "writeJsonById" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonById SqliteDb.TableName writer "two" + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonById SqliteDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonByFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + finally + writer.Complete() + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! 
loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal + (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + finally + writer.Complete() + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four}]" + "Incorrect documents were returned" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.four},{JsonDocument.five}]" + "Incorrect documents were returned" + finally + writer.Complete() + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + finally + writer.Complete() + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + finally + writer.Complete() + } + ] + testList "writeJsonFirstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] + let json = streamText stream + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeJsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
conn.writeJsonFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + SqliteDb.TableName + writer + Any + [ Field.Equal "Sub.Foo" "green" ] + [ Field.Named "Sub.Bar DESC" ] + Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + finally + writer.Complete() + } + ] testList "updateById" [ testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let testDoc = { emptyDoc with Id = "one"; Sub = Some { Foo = "blue"; Bar = "red" } } do! conn.updateById SqliteDb.TableName "one" testDoc let! after = conn.findById SqliteDb.TableName "one" @@ -348,10 +943,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateById SqliteDb.TableName @@ -363,10 +958,10 @@ let integrationTests = testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.updateByFunc - SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! 
after = conn.findById SqliteDb.TableName "one" if Option.isNone after then Expect.isTrue false "There should have been a document returned post-update" @@ -378,21 +973,21 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateByFunc - SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] testList "patchById" [ testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.patchById SqliteDb.TableName "one" {| NumValue = 44 |} let! after = conn.findById SqliteDb.TableName "one" if Option.isNone after then @@ -402,10 +997,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchById SqliteDb.TableName "test" {| Foo = "green" |} } @@ -414,8 +1009,8 @@ let integrationTests = testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.patchByFields SqliteDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! after = conn.countByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 77 ] Expect.equal after 2L "There should have been 2 documents returned" @@ -423,10 +1018,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! 
db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByFields SqliteDb.TableName Any [ Field.Equal "Value" "burgundy" ] {| Foo = "green" |} } @@ -435,8 +1030,8 @@ let integrationTests = testTask "succeeds when fields are removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.removeFieldsById SqliteDb.TableName "two" [ "Sub"; "Value" ] try let! _ = conn.findById SqliteDb.TableName "two" @@ -448,15 +1043,15 @@ let integrationTests = testTask "succeeds when a field is not removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + // This not raising an exception is the test do! conn.removeFieldsById SqliteDb.TableName "two" [ "AFieldThatIsNotThere" ] } testTask "succeeds when no document is matched" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + // This not raising an exception is the test do! conn.removeFieldsById SqliteDb.TableName "two" [ "Value" ] } @@ -465,8 +1060,8 @@ let integrationTests = testTask "succeeds when a field is removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Sub" ] try let! _ = conn.findById SqliteDb.TableName "four" @@ -478,15 +1073,15 @@ let integrationTests = testTask "succeeds when a field is not removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + // This not raising an exception is the test do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Nothing" ] } testTask "succeeds when no document is matched" { use! 
db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + // This not raising an exception is the test do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.NotEqual "Abracadabra" "apple" ] [ "Value" ] @@ -496,8 +1091,8 @@ let integrationTests = testTask "succeeds when a document is deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.deleteById SqliteDb.TableName "four" let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 4L "There should have been 4 documents remaining" @@ -505,8 +1100,8 @@ let integrationTests = testTask "succeeds when a document is not deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.deleteById SqliteDb.TableName "thirty" let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 5L "There should have been 5 documents remaining" @@ -516,8 +1111,8 @@ let integrationTests = testTask "succeeds when documents are deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.deleteByFields SqliteDb.TableName Any [ Field.NotEqual "Value" "purple" ] let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 2L "There should have been 2 documents remaining" @@ -525,19 +1120,108 @@ let integrationTests = testTask "succeeds when documents are not deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + do! conn.deleteByFields SqliteDb.TableName Any [ Field.Equal "Value" "crimson" ] let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 5L "There should have been 5 documents remaining" } ] + testList "customList" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! 
docs = conn.customList (Query.find SqliteDb.TableName) [] fromData + Expect.hasLength docs 5 "There should have been 5 documents returned" + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! docs = + conn.customList + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + fromData + Expect.isEmpty docs "There should have been no documents returned" + } + ] + testList "customJsonArray" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.customJsonArray (Query.find SqliteDb.TableName) [] jsonFromData + Expect.stringStarts json "[" "The JSON array should have started with `[`" + Expect.stringContains json JsonDocument.one "Document ID `one` should have been found" + Expect.stringContains json JsonDocument.two "Document ID `two` should have been found" + Expect.stringContains json JsonDocument.three "Document ID `three` should have been found" + Expect.stringContains json JsonDocument.four "Document ID `four` should have been found" + Expect.stringContains json JsonDocument.five "Document ID `five` should have been found" + Expect.stringEnds json "]" "The JSON array should have ended with `[`" + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! docs = + conn.customJsonArray + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + jsonFromData + Expect.equal docs "[]" "There should have been no documents returned" + } + ] + testList "writeCustomJsonArray" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! 
loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeCustomJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData + let json = streamText stream + Expect.stringStarts json "[" "The JSON array should have started with `[`" + Expect.stringContains json JsonDocument.one "Document ID `one` should have been found" + Expect.stringContains json JsonDocument.two "Document ID `two` should have been found" + Expect.stringContains json JsonDocument.three "Document ID `three` should have been found" + Expect.stringContains json JsonDocument.four "Document ID `four` should have been found" + Expect.stringContains json JsonDocument.five "Document ID `five` should have been found" + Expect.stringEnds json "]" "The JSON array should have ended with `]`" + finally + writer.Complete() + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! conn.writeCustomJsonArray + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() + } + ] testList "customSingle" [ testTask "succeeds when a row is found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! doc = conn.customSingle $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" @@ -549,8 +1233,8 @@ let integrationTests = testTask "succeeds when a row is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - + do! loadDocs conn + let! 
doc = conn.customSingle $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" @@ -559,33 +1243,37 @@ let integrationTests = Expect.isNone doc "There should not have been a document returned" } ] - testList "customList" [ - testTask "succeeds when data is found" { + testList "customJsonSingle" [ + testTask "succeeds when a row is found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - - let! docs = conn.customList (Query.find SqliteDb.TableName) [] fromData - Expect.hasLength docs 5 "There should have been 5 documents returned" + do! loadDocs conn + + let! json = + conn.customJsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "one") ] + jsonFromData + Expect.equal json JsonDocument.one "The JSON document is incorrect" } - testTask "succeeds when data is not found" { + testTask "succeeds when a row is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - - let! docs = - conn.customList - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" - [ SqliteParameter("@value", 100) ] - fromData - Expect.isEmpty docs "There should have been no documents returned" + do! loadDocs conn + + let! json = + conn.customJsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "eighty") ] + jsonFromData + Expect.equal json "{}" "There should not have been a document returned" } ] testList "customNonQuery" [ testTask "succeeds when operating on data" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.customNonQuery $"DELETE FROM {SqliteDb.TableName}" [] @@ -595,7 +1283,7 @@ let integrationTests = testTask "succeeds when no data matches where clause" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! 
conn.customNonQuery $"DELETE FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" @@ -608,7 +1296,7 @@ let integrationTests = testTask "customScalar succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! nbr = conn.customScalar "SELECT 5 AS test_value" [] _.GetInt32(0) Expect.equal nbr 5 "The query should have returned the number 5" } diff --git a/src/Tests/SqliteTests.fs b/src/Tests/SqliteTests.fs index c2c3f58..cd81442 100644 --- a/src/Tests/SqliteTests.fs +++ b/src/Tests/SqliteTests.fs @@ -1,5 +1,7 @@ module SqliteTests +open System.IO +open System.IO.Pipelines open System.Text.Json open BitBadger.Documents open BitBadger.Documents.Sqlite @@ -135,6 +137,17 @@ let loadDocs () = backgroundTask { for doc in testDocuments do do! insert SqliteDb.TableName doc } +/// Set up a stream writer for a test +let writeStream (stream: Stream) = + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + +/// Get the text of the given stream +let streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + + /// Integration tests for the Configuration module of the SQLite library let configurationTests = testList "Configuration" [ test "useConnectionString / connectionString succeed" { @@ -151,6 +164,89 @@ let configurationTests = testList "Configuration" [ /// Integration tests for the Custom module of the SQLite library let customTests = testList "Custom" [ + testList "list" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! docs = Custom.list (Query.find SqliteDb.TableName) [] fromData + Expect.hasCountOf docs 5u (fun _ -> true) "There should have been 5 documents returned" + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! 
docs = + Custom.list + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + fromData + Expect.isEmpty docs "There should have been no documents returned" + } + ] + testList "jsonArray" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Custom.jsonArray (Query.find SqliteDb.TableName) [] jsonFromData + Expect.stringStarts json "[" "The JSON array should have started with `[`" + Expect.stringContains json JsonDocument.one "Document ID `one` should have been found" + Expect.stringContains json JsonDocument.two "Document ID `two` should have been found" + Expect.stringContains json JsonDocument.three "Document ID `three` should have been found" + Expect.stringContains json JsonDocument.four "Document ID `four` should have been found" + Expect.stringContains json JsonDocument.five "Document ID `five` should have been found" + Expect.stringEnds json "]" "The JSON array should have ended with `[`" + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! docs = + Custom.jsonArray + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + jsonFromData + Expect.equal docs "[]" "There should have been no documents returned" + } + ] + testList "writeJsonArray" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Custom.writeJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData + let json = streamText stream + Expect.stringStarts json "[" "The JSON array should have started with `[`" + Expect.stringContains json JsonDocument.one "Document ID `one` should have been found" + Expect.stringContains json JsonDocument.two "Document ID `two` should have been found" + Expect.stringContains json JsonDocument.three "Document ID `three` should have been found" + Expect.stringContains json JsonDocument.four "Document ID `four` should have been found" + Expect.stringContains json JsonDocument.five "Document ID `five` should have been found" + Expect.stringEnds json "]" "The JSON array should have ended with `[`" + finally + writer.Complete() + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Custom.writeJsonArray + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() + } + ] testList "single" [ testTask "succeeds when a row is found" { use! db = SqliteDb.BuildDb() @@ -176,24 +272,28 @@ let customTests = testList "Custom" [ Expect.isNone doc "There should not have been a document returned" } ] - testList "list" [ - testTask "succeeds when data is found" { + testList "jsonSingle" [ + testTask "succeeds when a row is found" { use! db = SqliteDb.BuildDb() do! loadDocs () - let! docs = Custom.list (Query.find SqliteDb.TableName) [] fromData - Expect.hasCountOf docs 5u (fun _ -> true) "There should have been 5 documents returned" + let! 
json = + Custom.jsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "one") ] + jsonFromData + Expect.equal json JsonDocument.one "The JSON document is incorrect" } - testTask "succeeds when data is not found" { + testTask "succeeds when a row is not found" { use! db = SqliteDb.BuildDb() do! loadDocs () - let! docs = - Custom.list - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" - [ SqliteParameter("@value", 100) ] - fromData - Expect.isEmpty docs "There should have been no documents returned" + let! json = + Custom.jsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "eighty") ] + jsonFromData + Expect.equal json "{}" "There should not have been a document returned" } ] testList "nonQuery" [ @@ -653,6 +753,533 @@ let findTests = testList "Find" [ ] ] +/// Verify a JSON array begins with "[" and ends with "]" +let private verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Verify an empty JSON array +let private verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + +/// Verify an empty JSON document +let private verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + +/// Verify the presence of any of the given documents in the given JSON +let private verifyAny (json: string) (docs: string list) = + match docs |> List.tryFind json.Contains with + | Some _ -> () + | None -> + let theDocs = docs |> String.concat " | " + Expect.isTrue false $"Could not find any of |{theDocs}| in {json}" + +/// Integration tests for the Json module of the SQLite library +let jsonTests = testList "Json" [ + testList "all" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + + do! insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! 
insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + let! json = Json.all SqliteDb.TableName + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + let! json = Json.all SqliteDb.TableName + verifyEmpty json + } + ] + testList "allOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.allOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] + Expect.equal + json + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.allOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] + Expect.equal + json + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.allOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] + Expect.equal + json + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + ] + testList "byId" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! 
json = Json.byId SqliteDb.TableName "two" + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byId SqliteDb.TableName "three hundred eighty-seven" + verifyNoDoc json + } + ] + testList "byFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFields SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFields SqliteDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal json $"[{JsonDocument.three}]" "There should have been one document returned" + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFields SqliteDb.TableName Any [ Field.Greater "NumValue" 100 ] + verifyEmpty json + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + do! Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + let! json = Json.byFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + do! 
Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + let! json = Json.byFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty json + } + ] + testList "byFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFieldsOrdered SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal json $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.byFieldsOrdered SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal json $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.byFieldsOrdered SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + json $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" "Documents not ordered correctly" + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.byFieldsOrdered + SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + json $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" "Documents not ordered correctly" + } + ] + testList "firstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.firstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when multiple documents are found" { + use! 
db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.firstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.firstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "firstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.firstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal json JsonDocument.two "An incorrect document was returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.firstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal json JsonDocument.four "An incorrect document was returned" + } + ] + testList "writeAll" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + + do! insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAll SqliteDb.TableName writer + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + finally + writer.Complete() + } + testTask "succeeds when there is no data" { + use! 
db = SqliteDb.BuildDb() + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAll SqliteDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeAllOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + finally + writer.Complete() + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() + } + ] + testList "writeById" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeById SqliteDb.TableName writer "two" + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeById SqliteDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeByFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + finally + writer.Complete() + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal + (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + finally + writer.Complete() + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + do! 
Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + finally + writer.Complete() + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + do! Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() + } + ] + testList "writeByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal + (streamText stream) $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + finally + writer.Complete() + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + finally + writer.Complete() + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + finally + writer.Complete() + } + ] + testList "writeFirstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() + } + testTask "succeeds when multiple documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! 
Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] + let json = streamText stream + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + finally + writer.Complete() + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() + } + ] + testList "writeFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + finally + writer.Complete() + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + finally + writer.Complete() + } + ] +] + /// Integration tests for the Update module of the SQLite library let updateTests = testList "Update" [ testList "byId" [ @@ -682,7 +1309,7 @@ let updateTests = testList "Update" [ use! db = SqliteDb.BuildDb() do! loadDocs () - do! Update.byFunc SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! 
after = Find.byId SqliteDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" Expect.equal @@ -697,7 +1324,7 @@ let updateTests = testList "Update" [ Expect.isEmpty before "There should have been no documents returned" // This not raising an exception is the test - do! Update.byFunc SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] ] @@ -854,6 +1481,7 @@ let all = testList "Sqlite" [ countTests existsTests findTests + jsonTests updateTests patchTests removeFieldsTests diff --git a/src/Tests/Types.fs b/src/Tests/Types.fs index bec9b16..16bd9a9 100644 --- a/src/Tests/Types.fs +++ b/src/Tests/Types.fs @@ -26,6 +26,22 @@ type JsonDocument = NumValue: int Sub: SubDocument option } +module JsonDocument = + /// The JSON for document ID `one` + let one = """{"Id":"one","Value":"FIRST!","NumValue":0,"Sub":null}""" + + /// The JSON for document ID `two` + let two = """{"Id":"two","Value":"another","NumValue":10,"Sub":{"Foo":"green","Bar":"blue"}}""" + + /// The JSON for document ID `three` + let three = """{"Id":"three","Value":"","NumValue":4,"Sub":null}""" + + /// The JSON for document ID `four` + let four = """{"Id":"four","Value":"purple","NumValue":17,"Sub":{"Foo":"green","Bar":"red"}}""" + + /// The JSON for document ID `five` + let five = """{"Id":"five","Value":"purple","NumValue":18,"Sub":null}""" + /// An empty JsonDocument let emptyDoc = { Id = ""; Value = ""; NumValue = 0; Sub = None } diff --git a/toc.yml b/toc.yml new file mode 100644 index 0000000..630fb84 --- /dev/null +++ b/toc.yml @@ -0,0 +1,4 @@ +- name: Docs + href: docs/getting-started.md +- name: API + href: api/ \ No newline at end of file